Bin He / openairinterface5G

Commit fe330f7b, authored Mar 21, 2021 by Raphael Defosseux

Merge branch 'ci-new-docker-pipeline' into 'develop'

Ci new docker pipeline

See merge request oai/openairinterface5g!1095

Parents: 3dc0a098, 6f262290
Changes: 15 files
ci-scripts/Jenkinsfile-GitLab-Container (new file, mode 0 → 100644)
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// Location of the executor node
def nodeExecutor = params.nodeExecutor

// Tags to shorten pipeline duration
def doMandatoryTests = false
def doFullTestsuite = false

pipeline {
  agent {
    label nodeExecutor
  }
  options {
    disableConcurrentBuilds()
    timestamps()
    gitLabConnection('OAI GitLab')
    ansiColor('xterm')
  }
  stages {
    stage ("Verify Parameters") {
      steps {
        script {
          JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
          JOB_TIMESTAMP = JOB_TIMESTAMP.trim()

          echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
          def allParametersPresent = true

          echo '\u2705 \u001B[32mVerify Labels\u001B[0m'
          if ("MERGE".equals(env.gitlabActionType)) {
            LABEL_CHECK = sh returnStdout: true, script: 'ci-scripts/checkGitLabMergeRequestLabels.sh --mr-id ' + env.gitlabMergeRequestIid
            LABEL_CHECK = LABEL_CHECK.trim()
            if (LABEL_CHECK == 'NONE') {
              def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Your merge request has none of the mandatory labels:\n\n"
              message += " - BUILD-ONLY\n"
              message += " - 4G-LTE\n"
              message += " - 5G-NR\n"
              message += " - CI\n\n"
              message += "Not performing CI due to lack of labels"
              addGitLabMRComment comment: message
              error('Not performing CI due to lack of labels')
            } else if (LABEL_CHECK == 'FULL') {
              doMandatoryTests = true
              doFullTestsuite = true
            } else if (LABEL_CHECK == 'SHORTEN-5G') {
              doMandatoryTests = true
            } else {
              def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): We will perform only build stages on your Merge Request"
              addGitLabMRComment comment: message
            }
          } else {
            doMandatoryTests = true
            doFullTestsuite = true
          }
        }
      }
    }
    stage ("Verify Guidelines") {
      steps {
        echo "Git URL is ${GIT_URL}"
        echo "GitLab Act is ${env.gitlabActionType}"
        script {
          if ("MERGE".equals(env.gitlabActionType)) {
            // for some time now, in push events, gitlabUserEmail is not populated
            gitCommitAuthorEmailAddr = env.gitlabUserEmail
            echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
            // GitLab-Jenkins plugin integration is lacking to perform the merge by itself
            // Doing it manually --> it may have merge conflicts
            sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
          } else {
            echo "Git Branch is ${GIT_BRANCH}"
            echo "Git Commit is ${GIT_COMMIT}"
            // for some time now, in push events, gitlabUserEmail is not populated
            gitCommitAuthorEmailAddr = sh returnStdout: true, script: 'git log -n1 --pretty=format:%ae ${GIT_COMMIT}'
            gitCommitAuthorEmailAddr = gitCommitAuthorEmailAddr.trim()
            echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
            sh "git log -n1 --pretty=format:\"%s\" > .git/CI_COMMIT_MSG"
          }
        }
      }
      post {
        failure {
          script {
            def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Merge Conflicts -- Cannot perform CI"
            addGitLabMRComment comment: message
            currentBuild.result = 'FAILURE'
          }
        }
      }
    }
    // Build Stages are Mandatory
    // Later we will add a Ubuntu20 build
    stage ("Image Building Processes") {
      parallel {
        stage ("Ubuntu18 Build") {
          steps {
            script {
              triggerSlaveJob ('RAN-Ubuntu18-Image-Builder', 'Ubuntu18-Images-Build')
            }
          }
          post {
            always {
              script {
                finalizeSlaveJob('RAN-Ubuntu18-Image-Builder')
              }
            }
            failure {
              script {
                currentBuild.result = 'FAILURE'
              }
            }
          }
        }
        stage ("RHEL8 Build") {
          steps {
            script {
              triggerSlaveJob ('RAN-RHEL8-Image-Builder', 'RHEL8-Images-Build')
            }
          }
          post {
            always {
              script {
                finalizeSlaveJob('RAN-RHEL8-Image-Builder')
              }
            }
            failure {
              script {
                currentBuild.result = 'FAILURE'
              }
            }
          }
        }
      }
    }
  }
  post {
    always {
      script {
        emailext attachmentsPattern: '*results*.html',
          body: '''Hi,
Here are attached HTML report files for $PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!
Regards,
OAI CI Team''',
          replyTo: 'no-reply@openairinterface.org',
          subject: '$PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!',
          to: gitCommitAuthorEmailAddr

        if (fileExists('.git/CI_COMMIT_MSG')) {
          sh "rm -f .git/CI_COMMIT_MSG"
        }
      }
    }
    success {
      script {
        def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")"
        if ("MERGE".equals(env.gitlabActionType)) {
          echo "This is a MERGE event"
          addGitLabMRComment comment: message
        }
      }
    }
    failure {
      script {
        def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")"
        if ("MERGE".equals(env.gitlabActionType)) {
          echo "This is a MERGE event"
          addGitLabMRComment comment: message
        }
      }
    }
  }
}
// ---- Slave Job functions

def triggerSlaveJob (jobName, gitlabStatusName) {
  // Workaround for the "cancelled" GitLab pipeline notification
  // The slave job is triggered with propagate set to false so the following commands are executed
  // Its status is now PASS/SUCCESS from a stage pipeline point of view
  // localStatus variable MUST be analyzed to properly assess the status
  localStatus = build job: jobName,
    parameters: [
      string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
      string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
      string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
      booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
      string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
    ], propagate: false
  localResult = localStatus.getResult()
  echo "${jobName} Slave Job status is ${localResult}"
  gitlabCommitStatus(name: gitlabStatusName) {
    if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
      echo "${jobName} Slave Job is OK"
    } else {
      echo "${jobName} Slave Job is KO"
      sh "ci-scripts/fail.sh"
    }
  }
}
def triggerSlaveJobNoGitLab (jobName) {
  // Workaround for the "cancelled" GitLab pipeline notification
  // The slave job is triggered with propagate set to false so the following commands are executed
  // Its status is now PASS/SUCCESS from a stage pipeline point of view
  // localStatus variable MUST be analyzed to properly assess the status
  localStatus = build job: jobName,
    parameters: [
      string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
      string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
      string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
      booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
      string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
    ], propagate: false
  localResult = localStatus.getResult()
  echo "${jobName} Slave Job status is ${localResult}"
  if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
    echo "${jobName} Slave Job is OK"
  } else {
    echo "${jobName} Slave Job is KO"
    sh "ci-scripts/fail.sh"
  }
}
def finalizeSlaveJob (jobName) {
  // In case of any non-success, we are retrieving the HTML report of the last completed
  // slave job. The only drawback is that we may retrieve the HTML report of a previous build
  fileName = "test_results-${jobName}.html"
  if (!fileExists(fileName)) {
    copyArtifacts(projectName: jobName,
      filter: 'test_results*.html',
      selector: lastCompleted())
    if (fileExists(fileName)) {
      sh "sed -i -e 's#TEMPLATE_BUILD_TIME#${JOB_TIMESTAMP}#' ${fileName}"
      archiveArtifacts artifacts: fileName
    }
  }
}
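For reference, a minimal Python sketch (not part of this commit) of the value produced by the JOB_TIMESTAMP one-liner above, i.e. 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'; finalizeSlaveJob later substitutes this string for the TEMPLATE_BUILD_TIME placeholder in the retrieved HTML report:

    # Illustrative only -- not part of the repository.
    from datetime import datetime, timezone

    def job_timestamp() -> str:
        # Same shape as `date --utc --rfc-3339=seconds` with the trailing "+00:00"
        # offset stripped, e.g. "2021-03-21 10:15:42"
        return datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')

    if __name__ == '__main__':
        print(job_timestamp())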
ci-scripts/Jenkinsfile-tmp-ran
...
@@ -241,6 +241,10 @@ pipeline {
         stage ("Terminate") {
             parallel {
                 stage ('Terminate UE') {
+                    // Bypassing this stage if there is no ADB server defined
+                    when {
+                        expression { params.ADB_IPAddress != "none" }
+                    }
                     steps {
                         echo '\u2705 \u001B[32mTerminate UE\u001B[0m'
                         withCredentials ([
...
@@ -275,6 +279,10 @@ pipeline {
                     }
                 }
                 stage ('Terminate SPGW') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                        expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         echo '\u2705 \u001B[32mTerminate SPGW\u001B[0m'
                         withCredentials ([
...
@@ -292,6 +300,10 @@ pipeline {
                     }
                 }
                 stage ('Terminate MME') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                        expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         echo '\u2705 \u001B[32mTerminate MME\u001B[0m'
                         withCredentials ([
...
@@ -309,6 +321,10 @@ pipeline {
                     }
                 }
                 stage ('Terminate HSS') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                        expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         echo '\u2705 \u001B[32mTerminate HSS\u001B[0m'
                         withCredentials ([
...
@@ -371,6 +387,10 @@ pipeline {
                     }
                 }
                 stage ('Log Collection (SPGW)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                        expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials ([
                             [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
...
@@ -389,6 +409,10 @@ pipeline {
                     }
                 }
                 stage ('Log Collection (MME)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                        expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials ([
                             [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
...
@@ -407,6 +431,10 @@ pipeline {
                     }
                 }
                 stage ('Log Collection (HSS)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                        expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials ([
                             [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
...
@@ -425,6 +453,10 @@ pipeline {
                     }
                 }
                 stage ('Log Collection (Ping)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                        expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials ([
                             [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
...
@@ -443,6 +475,10 @@ pipeline {
                     }
                 }
                 stage ('Log Collection (Iperf)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                        expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials ([
                             [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
...
@@ -482,7 +518,7 @@ pipeline {
             // Making sure that we really shutdown every thing before leaving
             failure {
                 script {
-                    if (!termStatusArray[termUE]) {
+                    if ((!termStatusArray[termUE]) && (params.ADB_IPAddress != "none")) {
                         withCredentials ([
                             [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.ADB_Credentials}", usernameVariable: 'ADB_Username', passwordVariable: 'ADB_Password']
                         ]) {
...
@@ -496,21 +532,21 @@ pipeline {
                             sh "python3 ci-scripts/main.py --mode=TerminateeNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
                         }
                     }
-                    if (!termStatusArray[termSPGW]) {
+                    if ((!termStatusArray[termSPGW]) && (params.EPC_IPAddress != "none")) {
                         withCredentials ([
                             [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
                         ]) {
                             sh "python3 ci-scripts/main.py --mode=TerminateSPGW --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
                         }
                     }
-                    if (!termStatusArray[termMME]) {
+                    if ((!termStatusArray[termMME]) && (params.EPC_IPAddress != "none")) {
                         withCredentials ([
                             [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
                         ]) {
                             sh "python3 ci-scripts/main.py --mode=TerminateMME --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
                         }
                     }
-                    if (!termStatusArray[termHSS]) {
+                    if ((!termStatusArray[termHSS]) && (params.EPC_IPAddress != "none")) {
                         withCredentials ([
                             [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
                         ]) {
...
ci-scripts/cls_containerize.py

...
@@ -35,6 +35,7 @@ import sys              # arg
 import re               # reg
 import logging
 import os
+import shutil
 import time
 from multiprocessing import Process, Lock, SimpleQueue
 from zipfile import ZipFile
...
@@ -124,7 +125,7 @@ class Containerize():
             self.cli = 'docker'
             self.dockerfileprefix = '.ubuntu18'
         elif self.host == 'Red Hat':
-            self.cli = 'podman'
+            self.cli = 'sudo podman'
             self.dockerfileprefix = '.rhel8.2'
         imageNames = []
...
@@ -173,6 +174,7 @@ class Containerize():
         # if the branch is not develop, then it is a merge request and we need to do
         # the potential merge. Note that merge conflicts should already been checked earlier
         imageTag = 'develop'
+        sharedTag = 'develop'
         if (self.ranAllowMerge):
             imageTag = 'ci-temp'
             if self.ranTargetBranch == '':
...
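As an aside, a small stand-alone Python sketch (hypothetical, not in the repository) of the tag selection this hunk introduces: merge requests build their target images under 'ci-temp', while the shared ran-build base image keeps the 'develop' tag that only push events rebuild.

    # Hypothetical illustration of the imageTag / sharedTag logic shown above.
    def select_tags(ran_allow_merge: bool):
        image_tag = 'develop'   # tag used for the per-target images
        shared_tag = 'develop'  # tag used for the shared ran-build base image
        if ran_allow_merge:
            # merge requests build their target images under a temporary tag
            image_tag = 'ci-temp'
        return image_tag, shared_tag

    assert select_tags(False) == ('develop', 'develop')
    assert select_tags(True) == ('ci-temp', 'develop')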
@@ -188,38 +190,23 @@ class Containerize():
             mySSH.command('mkdir -p tmp/entitlement/', '\$', 5)
             mySSH.command('sudo cp /etc/rhsm/ca/redhat-uep.pem tmp/ca/', '\$', 5)
             mySSH.command('sudo cp /etc/pki/entitlement/*.pem tmp/entitlement/', '\$', 5)
         #mySSH.close()
         #return 0
         sharedimage = 'ran-build'
         # Let's remove any previous run artifacts if still there
         mySSH.command(self.cli + ' image prune --force', '\$', 5)
         mySSH.command(self.cli + ' image rm ' + sharedimage + ':' + imageTag, '\$', 5)
         for image,pattern in imageNames:
             mySSH.command(self.cli + ' image rm ' + image + ':' + imageTag, '\$', 5)
         # Build the shared image
         mySSH.command(self.cli + ' build --target ' + sharedimage + ' --tag ' + sharedimage + ':' + imageTag + ' --file docker/Dockerfile.ran' + self.dockerfileprefix + ' --build-arg NEEDED_GIT_PROXY="http://proxy.eurecom.fr:8080" . > cmake_targets/log/ran-build.log 2>&1', '\$', 1600)
         # Build the target image(s)
         previousImage = sharedimage + ':' + imageTag
         danglingShaOnes = []
         mySSH.command(self.cli + ' image prune --force', '\$', 30)
         if (not self.ranAllowMerge):
             mySSH.command(self.cli + ' image rm ' + sharedimage + ':' + sharedTag, '\$', 30)
         for image,pattern in imageNames:
             # the archived Dockerfiles have "ran-build:latest" as base image
             # we need to update them with proper tag
             mySSH.command('sed -i -e "s#' + sharedimage + ':latest#' + sharedimage + ':' + imageTag + '#" docker/Dockerfile.' + pattern + self.dockerfileprefix, '\$', 5)
             mySSH.command(self.cli + ' build --target ' + image + ' --tag ' + image + ':' + imageTag + ' --file docker/Dockerfile.' + pattern + self.dockerfileprefix + ' . > cmake_targets/log/' + image + '.log 2>&1', '\$', 1200)
             # Retrieving the dangling image(s) for the log collection
             mySSH.command(self.cli + ' images --filter "dangling=true" --filter "since=' + previousImage + '" -q | sed -e "s#^#sha=#"', '\$', 5)
             result = re.search('sha=(?P<imageShaOne>[a-zA-Z0-9\-\_]+)', mySSH.getBefore())
             if result is not None:
                 danglingShaOnes.append((image, result.group('imageShaOne')))
             previousImage = image + ':' + imageTag
             mySSH.command(self.cli + ' image rm ' + image + ':' + imageTag, '\$', 30)
         imageTag = 'ci-temp'
         # First verify if images were properly created.
         # Build the shared image only on Push Events (not on Merge Requests)
         if (not self.ranAllowMerge):
             mySSH.command(self.cli + ' build --target ' + sharedimage + ' --tag ' + sharedimage + ':' + sharedTag + ' --file docker/Dockerfile.ran' + self.dockerfileprefix + ' --build-arg NEEDED_GIT_PROXY="http://proxy.eurecom.fr:8080" . > cmake_targets/log/ran-build.log 2>&1', '\$', 1600)
         # First verify if the shared image was properly created.
         status = True
         mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + sharedimage + ':' + imageTag, '\$', 5)
         if mySSH.getBefore().count('No such object') != 0:
             logging.error('Could not build properly ran-build')
         mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + sharedimage + ':' + sharedTag, '\$', 5)
         if mySSH.getBefore().count('o such image') != 0:
             logging.error('\u001B[1m Could not build properly ran-build\u001B[0m')
             status = False
         else:
             result = re.search('Size *= *(?P<size>[0-9\-]+) *bytes', mySSH.getBefore())
...
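The sed call in this hunk rewrites the archived Dockerfiles so that their "ran-build:latest" base image reference points at the tag actually built in this run. A hedged Python equivalent, for illustration only (the CI itself uses the sed command shown above):

    # Illustration only -- the pipeline runs sed over docker/Dockerfile.<pattern><prefix>.
    from pathlib import Path

    def retag_base_image(dockerfile_path: str, shared_image: str = 'ran-build', tag: str = 'develop') -> None:
        path = Path(dockerfile_path)
        text = path.read_text()
        # replace e.g. "FROM ran-build:latest" with "FROM ran-build:<tag>"
        path.write_text(text.replace(shared_image + ':latest', shared_image + ':' + tag))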
@@ -240,11 +227,40 @@ class Containerize():
                 self.allImagesSize['ran-build'] = str(round(imageSize,1)) + ' Gbytes'
             else:
                 logging.debug('ran-build size is unknown')
         # If the shared image failed, no need to continue
         if not status:
             # Recover the name of the failed container?
             mySSH.command(self.cli + ' ps --quiet --filter "status=exited" -n1 | xargs ' + self.cli + ' rm -f', '\$', 5)
             mySSH.command(self.cli + ' image prune --force', '\$', 30)
             mySSH.close()
             logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
             HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
             HTML.CreateHtmlTabFooter(False)
             sys.exit(1)
         else:
             # Recover build logs, for the moment only possible when build is successful
             mySSH.command(self.cli + ' create --name test ' + sharedimage + ':' + sharedTag, '\$', 5)
             mySSH.command('mkdir -p cmake_targets/log/ran-build', '\$', 5)
             mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/ran-build', '\$', 5)
             mySSH.command(self.cli + ' rm -f test', '\$', 5)
         # Build the target image(s)
         for image,pattern in imageNames:
             # the archived Dockerfiles have "ran-build:latest" as base image
             # we need to update them with proper tag
             mySSH.command('sed -i -e "s#' + sharedimage + ':latest#' + sharedimage + ':' + sharedTag + '#" docker/Dockerfile.' + pattern + self.dockerfileprefix, '\$', 5)
             mySSH.command(self.cli + ' build --target ' + image + ' --tag ' + image + ':' + imageTag + ' --file docker/Dockerfile.' + pattern + self.dockerfileprefix + ' . > cmake_targets/log/' + image + '.log 2>&1', '\$', 1200)
             # split the log
             mySSH.command('mkdir -p cmake_targets/log/' + image, '\$', 5)
             mySSH.command('python3 ci-scripts/docker_log_split.py --logfilename=cmake_targets/log/' + image + '.log', '\$', 5)
             # checking the status of the build
             mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + image + ':' + imageTag, '\$', 5)
             if mySSH.getBefore().count('No such object') != 0:
                 logging.error('Could not build properly ' + image)
             if mySSH.getBefore().count('o such image') != 0:
                 logging.error('\u001B[1mCould not build properly ' + image + '\u001B[0m')
                 status = False
                 # Here we should check if the last container corresponds to a failed command and destroy it
                 mySSH.command(self.cli + ' ps --quiet --filter "status=exited" -n1 | xargs ' + self.cli + ' rm -f', '\$', 5)
                 self.allImagesSize[image] = 'N/A -- Build Failed'
             else:
                 result = re.search('Size *= *(?P<size>[0-9\-]+) *bytes', mySSH.getBefore())
                 if result is not None:
...
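The size bookkeeping in these hunks parses the output of 'image inspect --format="Size = {{.Size}} bytes"' with the regex shown and stores a rounded "x.y Gbytes" string in allImagesSize. A small Python sketch of that conversion follows; the bytes-to-Gbytes divisor is an assumption, since the exact conversion line is elided from the diff:

    # Illustration only; the divisor of 1000**3 is an assumption.
    import re

    def format_image_size(inspect_output: str) -> str:
        result = re.search(r'Size *= *(?P<size>[0-9\-]+) *bytes', inspect_output)
        if result is None:
            return 'unknown'
        image_size = int(result.group('size')) / (1000 ** 3)  # bytes -> Gbytes (assumed factor)
        return str(round(image_size, 1)) + ' Gbytes'

    print(format_image_size('Size = 2837461203 bytes'))  # -> 2.8 Gbytes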
@@ -264,39 +280,27 @@ class Containerize():
                     self.allImagesSize[image] = str(round(imageSize,1)) + ' Gbytes'
                 else:
                     logging.debug('ran-build size is unknown')
         if not status:
             mySSH.close()
             logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
             HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
             #HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
             HTML.CreateHtmlTabFooter(False)
             sys.exit(1)
                     self.allImagesSize[image] = 'unknown'
             # Now pruning dangling images in between target builds
             mySSH.command(self.cli + ' image prune --force', '\$', 30)
         # Recover build logs, for the moment only possible when build is successful
         mySSH.command(self.cli + ' create --name test ' + sharedimage + ':' + imageTag, '\$', 5)
         mySSH.command('mkdir -p cmake_targets/log/ran-build', '\$', 5)
         mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/ran-build', '\$', 5)
         mySSH.command(self.cli + ' rm -f test', '\$', 5)
         for image,shaone in danglingShaOnes:
             mySSH.command('mkdir -p cmake_targets/log/' + image, '\$', 5)
             mySSH.command(self.cli + ' create --name test ' + shaone, '\$', 5)
             mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/' + image, '\$', 5)
             mySSH.command(self.cli + ' rm -f test', '\$', 5)
         mySSH.command(self.cli + ' image prune --force', '\$', 5)
         mySSH.command('cd cmake_targets', '\$', 5)
         # Analyzing the logs
         mySSH.command('cd ' + lSourcePath + '/cmake_targets', '\$', 5)
         mySSH.command('mkdir -p build_log_' + self.testCase_id, '\$', 5)
         mySSH.command('mv log/* ' + 'build_log_' + self.testCase_id, '\$', 5)
         #mySSH.close()
         mySSH.command('cd /tmp/CI-eNB/cmake_targets', '\$', 5)
         mySSH.command('cd ' + lSourcePath + '/cmake_targets', '\$', 5)
         mySSH.command('rm -f build_log_' + self.testCase_id + '.zip || true', '\$', 5)
         if (os.path.isfile('./build_log_' + self.testCase_id + '.zip')):
             os.remove('./build_log_' + self.testCase_id + '.zip')
         if (os.path.isdir('./build_log_' + self.testCase_id)):
             shutil.rmtree('./build_log_' + self.testCase_id)
         mySSH.command('zip -r -qq build_log_' + self.testCase_id + '.zip build_log_' + self.testCase_id, '\$', 5)
         mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/build_log_' + self.testCase_id + '.zip', '.')
         #mySSH.command('rm -f build_log_' + self.testCase_id + '.zip','\$', 5)
         mySSH.command('rm -f build_log_' + self.testCase_id + '.zip', '\$', 5)
         mySSH.close()
         ZipFile('build_log_' + self.testCase_id + '.zip').extractall('.')
         #Trying to identify the errors and warnings for each built images
         imageNames1 = imageNames
         shared = ('ran-build','ran')
...
@@ -328,9 +332,16 @@ class Containerize():
                 files[fil] = errorandwarnings
             self.collectInfo[image] = files
         logging.info('\u001B[1m Building OAI Image(s) Pass\u001B[0m')
         HTML.CreateHtmlTestRow(self.imageKind, 'OK', CONST.ALL_PROCESSES_OK)
         HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
         if status:
             logging.info('\u001B[1m Building OAI Image(s) Pass\u001B[0m')
             HTML.CreateHtmlTestRow(self.imageKind, 'OK', CONST.ALL_PROCESSES_OK)
             HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
         else:
             logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
             HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
             HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
             HTML.CreateHtmlTabFooter(False)
             sys.exit(1)

     def DeployObject(self, HTML, EPC):
         if self.eNB_serverId[self.eNB_instance] == '0':
...
ci-scripts/docker_log_split.py (new file, mode 0 → 100644)
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------