Commit 2a257b24 authored by Raphael Defosseux

Merge remote-tracking branch 'origin/master' into iperf-pod

parents 9c3ec7d6 c96a615d
1 merge request: !111 "feat: traffic server charts"
Showing changed files with 50 additions and 1594 deletions
@@ -31,3 +31,6 @@
[submodule "component/oai-pcf"]
path = component/oai-pcf
url = https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-pcf.git
[submodule "ci-scripts/common"]
path = ci-scripts/common
url = https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-common-ci.git
@@ -87,7 +87,10 @@ pipeline {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
sh "git clean -x -d -f > /dev/null 2>&1"
sh "git clean -x -d -ff > /dev/null 2>&1"
sh "git submodule foreach --recursive 'git clean -x -d -ff' > /dev/null 2>&1"
sh "git submodule deinit --force --all > /dev/null 2>&1"
sh "git submodule update --init --recursive ci-scripts/common"
sh "mkdir -p archives"
for (ii = 0; ii < imageNames.size(); ii++) {
if (ii == NRF) {
@@ -546,6 +549,14 @@ def retrieveLogsFromPods() {
echo "Getting logs from ${podName} failed"
}
}
// Retrieving mysql logs
podName = sh returnStdout: true, script: "oc get pods | grep mysql | awk '{print \$1}' || true"
podName = podName.trim()
try {
sh "oc logs ${podName} &> archives/mysql.logs"
} catch (Exception e) {
echo "Getting logs from ${podName} failed"
}
sh "oc describe pod &> archives/describe-pods-post-test.logs"
sh "oc get pods.metrics.k8s.io &> archives/nf-resource-consumption.log"
if (nrfPod.contains("oai-nrf")) {
......
This diff is collapsed.
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
//-------------------------------------------------------------------------------
// Abstraction function to send social media messages:
// like on Slack or Mattermost
def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) {
if (params.pipelineUsesSlack != null) {
if (params.pipelineUsesSlack) {
slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage
}
}
}
// Location of the CN executor node
def cn_ci_host = params.Host_CN_CI_Server
// for lock
def cn_ci_resource = params.DockerContainers
def ds_tester_ci_resource = params.DsTester
// When triggered by upstream, specify which tag to use
def upstreamTagToUse = params.upstreamTagToUse
// Location of the CN tester
def dsT_host_flag = false
def dsT_host = ""
def dsT_host_user = ""
// dsTester tag to use
def dsTesterTag = params.DSTESTER_TAG
// Flags
def scmEvent = false
def upstreamEvent = false
def deployed = true
// Default tags / branches --> could be passed on by upstream job or by PR content
def nrfTag = params.nrfTag
def nrfBranch = params.nrfBranch
def amfTag = params.amfTag
def amfBranch = params.amfBranch
def smfTag = params.smfTag
def smfBranch = params.smfBranch
def spgwuTag = params.spgwuTag
def spgwuBranch = params.spgwuBranch
def ausfTag = params.ausfTag
def ausfBranch = params.ausfBranch
def udmTag = params.udmTag
def udmBranch = params.udmBranch
def udrTag = params.udrTag
def udrBranch = params.udrBranch
//-------------------------------------------------------------------------------
// Pipeline start
pipeline {
agent {
label cn_ci_host
}
options {
disableConcurrentBuilds()
timestamps()
ansiColor('xterm')
lock(cn_ci_resource)
}
stages {
stage ('Verify Parameters') {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
echo "Node is ${NODE_NAME}"
if (params.DS_Tester_Server_Flag != null) {
dsT_host_flag = params.DS_Tester_Server_Flag
if (dsT_host_flag) {
def allParametersPresent = true
if (params.DS_Tester_Server_Name == null) {
allParametersPresent = false
} else {
dsT_host = params.DS_Tester_Server_Name
}
if (params.DS_Tester_Server_Login == null) {
allParametersPresent = false
} else {
dsT_host_user = params.DS_Tester_Server_Login
}
if (allParametersPresent) {
echo "DS Tester is on ${dsT_host}"
} else {
echo "Some DS Tester parameters are missing!"
sh "./ci-scripts/fail.sh"
}
}
}
// Find out the cause of the trigger
for (cause in currentBuild.getBuildCauses()) {
if (cause.toString() ==~ /.*UpstreamCause.*/) {
upstreamEvent = true
//} else {
// scmEvent = true
}
}
if (upstreamEvent) {
if (params.NRF_TAG != null) {
nrfTag = params.NRF_TAG
echo "Upstream Job passed NRF_TAG to use: ${nrfTag}"
}
if (params.NRF_BRANCH != null) {
nrfBranch = params.NRF_BRANCH
echo "Upstream Job passed NRF_BRANCH to use: ${nrfBranch}"
}
if (params.AMF_TAG != null) {
amfTag = params.AMF_TAG
echo "Upstream Job passed AMF_TAG to use: ${amfTag}"
}
if (params.AMF_BRANCH != null) {
amfBranch = params.AMF_BRANCH
echo "Upstream Job passed AMF_BRANCH to use: ${amfBranch}"
}
if (params.SMF_TAG != null) {
smfTag = params.SMF_TAG
echo "Upstream Job passed SMF_TAG to use: ${smfTag}"
}
if (params.SMF_BRANCH != null) {
smfBranch = params.SMF_BRANCH
echo "Upstream Job passed SMF_BRANCH to use: ${smfBranch}"
}
if (params.SPGWU_TAG != null) {
spgwuTag = params.SPGWU_TAG
echo "Upstream Job passed SPGWU_TAG to use: ${spgwuTag}"
}
if (params.SPGWU_BRANCH != null) {
spgwuBranch = params.SPGWU_BRANCH
echo "Upstream Job passed SPGWU_BRANCH to use: ${spgwuBranch}"
}
if (params.AUSF_TAG != null) {
ausfTag = params.AUSF_TAG
echo "Upstream Job passed AUSF_TAG to use: ${ausfTag}"
}
if (params.AUSF_BRANCH != null) {
ausfBranch = params.AUSF_BRANCH
echo "Upstream Job passed AUSF_TAG to use: ${ausfBranch}"
}
if (params.UDM_TAG != null) {
udmTag = params.UDM_TAG
echo "Upstream Job passed UDM_TAG to use: ${udmTag}"
}
if (params.UDM_BRANCH != null) {
udmBranch = params.UDM_BRANCH
echo "Upstream Job passed UDM_TAG to use: ${udmBranch}"
}
if (params.UDR_TAG != null) {
udrTag = params.UDR_TAG
echo "Upstream Job passed UDR_TAG to use: ${udrTag}"
}
if (params.UDR_BRANCH != null) {
udrBranch = params.UDR_BRANCH
echo "Upstream Job passed UDR_TAG to use: ${udrBranch}"
}
sh "git clean -x -d -f > /dev/null 2>&1"
sh "git fetch --prune > /dev/null 2>&1"
sh 'git checkout -f ' + upstreamTagToUse
sh "zip -r -qq oai-cn5g-fed.zip .git"
sh "mkdir -p archives DS-TEST-RESULTS"
sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch + ' --ausf-branch ' + ausfBranch + ' --udm-branch ' + udmBranch + ' --udr-branch ' + udrBranch
}
if (scmEvent) {
sh "git clean -x -d -f > /dev/null 2>&1"
if ("MERGE".equals(env.gitlabActionType)) {
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
}
sh "zip -r -qq oai-cn5g-fed.zip .git"
sh "mkdir -p archives DS-TEST-RESULTS"
sh './scripts/syncComponents.sh --nrf-branch develop --amf-branch develop --smf-branch develop --spgwu-tiny-branch develop --ausf-branch develop --udm-branch develop --udr-branch develop'
}
if ((!upstreamEvent) && (!scmEvent)) {
sh "git clean -x -d -f > /dev/null 2>&1"
sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch + ' --ausf-branch ' + ausfBranch + ' --udm-branch ' + udmBranch + ' --udr-branch ' + udrBranch
sh "mkdir -p archives DS-TEST-RESULTS"
}
// Verify that the images are available
try {
sh 'echo "OAI_NRF_TAG: oai-nrf:' + nrfTag +'" > archives/oai_nrf_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log'
} catch (Exception e) {
error "OAI NRF Image tag to test does not exist!"
}
try {
sh 'echo "OAI_AMF_TAG: oai-amf:' + amfTag +'" > archives/oai_amf_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log'
} catch (Exception e) {
error "OAI AMF Image tag to test does not exist!"
}
try {
sh 'echo "OAI_SMF_TAG: oai-smf:' + smfTag +'" > archives/oai_smf_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log'
} catch (Exception e) {
error "OAI SMF Image tag to test does not exist!"
}
try {
sh 'echo "OAI_SPGWU_TAG: oai-spgwu-tiny:' + spgwuTag +'" > archives/oai_spgwu_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log'
} catch (Exception e) {
error "OAI SPGW-U-Tiny Image tag to test does not exist!"
}
try {
sh 'echo "OAI_AUSF_TAG: oai-ausf:' + ausfTag +'" > archives/oai_ausf_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-ausf:' + ausfTag + ' >> archives/oai_ausf_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-ausf:' + ausfTag + ' >> archives/oai_ausf_image_info.log'
} catch (Exception e) {
error "OAI AUSF Image tag to test does not exist!"
}
try {
sh 'echo "OAI_UDM_TAG: oai-udm:' + udmTag +'" > archives/oai_udm_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-udm:' + udmTag + ' >> archives/oai_udm_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-udm:' + udmTag + ' >> archives/oai_udm_image_info.log'
} catch (Exception e) {
error "OAI UDM Image tag to test does not exist!"
}
try {
sh 'echo "OAI_UDR_TAG: oai-udr:' + udrTag +'" > archives/oai_udr_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-udr:' + udrTag + ' >> archives/oai_udr_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-udr:' + udrTag + ' >> archives/oai_udr_image_info.log'
} catch (Exception e) {
error "OAI UDR Image tag to test does not exist!"
}
}
}
}
stage ('Deploy Whole 5G Core Network and Test with DS Tester') {
when { expression {dsT_host_flag} }
steps {
lock (ds_tester_ci_resource) {
script {
echo '\u2705 \u001B[32mDeploy CN5G using Docker-Compose and Testing with DS Tester\u001B[0m'
if (fileExists("dstester")) {
sh "rm -Rf dstester > /dev/null 2>&1"
}
sh "mkdir -p dstester"
dir ('dstester') {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.dsTesterGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_token']
]) {
sh "git clone https://${git_username}:${git_token}@github.com/OPENAIRINTERFACE/chasseur.git . > ../git_clone.log 2>&1"
sh "git checkout -f " + dsTesterTag + " >> ../git_clone.log 2>&1"
// First put all correct tags to test
sh 'sed -i -e "s#NRF_TAG#' + nrfTag + '#" -i -e "s#AMF_TAG#' + amfTag + '#" -i -e "s#SMF_TAG#' + smfTag + '#" -i -e "s#UPF_TAG#' + spgwuTag + '#" -i -e "s#AUSF_TAG#' + ausfTag + '#" -i -e "s#UDM_TAG#' + udmTag + '#" -i -e "s#UDR_TAG#' + udrTag + '#" ./jenkins/suits/dc/integration/integration-basic.yaml'
// Check the route between the container and the DSTester network, and add it
sh 'python3 ../ci-scripts/routeCheck.py --mode Add --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-basic/integration.tplt'
dir ('jenkins') {
try {
sh "python3 ./chasseurdocker.py -f ./suits/dc/integration/integration-basic.yaml | tee ../../DS-TEST-RESULTS/dsTester_Summary.txt"
} catch (Exception e) {
currentBuild.result = 'FAILURE'
echo "dsTester FrameWork FAILED"
}
}
sh 'python3 ../ci-scripts/routeCheck.py --mode Delete --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-basic/integration.tplt'
}
}
sh "python3 ./ci-scripts/toCheckDSTesterResult.py"
}
}
}
}
}
post {
always {
script {
// Generating the HTML report
sh 'python3 ./ci-scripts/dsTestGenerateHTMLReport.py --job_name ' + JOB_NAME + ' --job_id ' + BUILD_ID + ' --job_url ' + BUILD_URL
// Zipping all archived log files
sh "zip -r -qq cn5g_fed_docker_logs.zip archives DS-TEST-RESULTS"
if (fileExists('cn5g_fed_docker_logs.zip')) {
archiveArtifacts artifacts: 'cn5g_fed_docker_logs.zip'
}
if (fileExists('test_results_oai_cn5g_basic.html')) {
archiveArtifacts artifacts: 'test_results_oai_cn5g_basic.html'
}
}
}
}
}
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
//-------------------------------------------------------------------------------
// Abstraction function to send social media messages:
// like on Slack or Mattermost
def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) {
if (params.pipelineUsesSlack != null) {
if (params.pipelineUsesSlack) {
slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage
}
}
}
// Location of the CN executor node
def cn_ci_host = params.Host_CN_CI_Server
// for lock
def cn_ci_resource = params.DockerContainers
//-------------------------------------------------------------------------------
// Pipeline start
pipeline {
agent {
label cn_ci_host
}
options {
disableConcurrentBuilds()
timestamps()
ansiColor('xterm')
lock(cn_ci_resource)
}
stages {
stage ('Verify Parameters') {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
sh "git clean -x -d -f > /dev/null 2>&1"
sh 'git log -n1'
if (params.CheckAnotherBranch != null) {
if (params.CheckAnotherBranch) {
if (params.BranchToCheck != null) {
sh 'git checkout '+ params.BranchToCheck
sh 'git log -n1'
}
}
}
sh "mkdir -p archives/cn5g"
sh "mkdir -p archives/cn5gwithnoNRF"
sh "mkdir -p archives/gnbsim"
}
}
}
stage ('Deploy Whole 5G Core Network with NRF') {
steps {
script {
echo '\u2705 \u001B[32mDeploy CN5G using Docker-Compose with NRF\u001B[0m'
dir('docker-compose') {
sh 'sed -i -e "s@latest@develop@g" docker-compose.yaml'
sh 'docker-compose -f docker-compose.yaml up -d > ../archives/cn5g/compose_5gcn_up.log 2>&1'
sh 'sleep 100'
// Do a check on number of healthy containers
// 5 == mysql + nrf + amf + smf + upf(spgwu-tiny)
ret = sh returnStdout: true, script: 'docker-compose ps -a | grep -v unhealthy | grep -c healthy || true'
ret = ret.trim()
if (ret != '5') {
error "Deployment went wrong!"
}
}
}
}
post {
always {
script {
sh 'docker logs oai-nrf > archives/cn5g/oai_nrf.log 2>&1 || true'
sh 'docker logs oai-amf > archives/cn5g/oai_amf.log 2>&1 || true'
sh 'docker logs oai-smf > archives/cn5g/oai_smf.log 2>&1 || true'
sh 'docker logs oai-spgwu > archives/cn5g/oai_spgwu.log 2>&1 || true'
}
}
success {
script {
sh 'echo "DEPLOYMENT: OK"'
}
}
unsuccessful {
script {
sh 'echo "DEPLOYMENT: KO"'
}
}
}
}
stage ('Undeploy 5G-CN with NRF') {
steps {
script {
echo '\u2705 \u001B[32mUn-Deploy CN5G with NRF\u001B[0m'
dir('docker-compose') {
sh 'docker-compose down > ../archives/cn5g/compose_normal_down.log 2>&1'
}
}
}
}
stage ('Deploy Whole 5G Core Network without NRF') {
steps {
script {
echo '\u2705 \u001B[32mDeploy CN5G using Docker-Compose without NRF\u001B[0m'
dir('docker-compose') {
sh 'sed -i -e "s@latest@develop@g" docker-compose-no-nrf.yaml'
sh 'docker-compose -f docker-compose-no-nrf.yaml up -d > ../archives/cn5gwithnoNRF/compose_5gcn_up.log 2>&1'
sh 'sleep 100'
// Do a check on number of healthy containers
// 4 == mysql + amf + smf + upf(spgwu-tiny)
ret = sh returnStdout: true, script: 'docker-compose -f docker-compose-no-nrf.yaml ps -a | grep -v unhealthy | grep -c healthy || true'
ret = ret.trim()
if (ret != '4') {
error "Deployment went wrong without NRF!"
}
}
}
}
post {
always {
script {
sh 'docker logs oai-amf > archives/cn5gwithnoNRF/oai_amf.log 2>&1 || true'
sh 'docker logs oai-smf > archives/cn5gwithnoNRF/oai_smf.log 2>&1 || true'
sh 'docker logs oai-spgwu > archives/cn5gwithnoNRF/oai_spgwu.log 2>&1 || true'
}
}
success {
script {
sh 'echo "DEPLOYMENT: OK without NRF"'
}
}
unsuccessful {
script {
dir('docker-compose') {
sh 'docker-compose -f docker-compose-no-nrf.yaml down || true'
}
sh 'echo "DEPLOYMENT: KO without NRF"'
}
}
}
}
stage ('Undeploy 5G-CN without NRF') {
steps {
script {
echo '\u2705 \u001B[32mUn-Deploy CN5G without NRF\u001B[0m'
dir('docker-compose') {
sh 'docker-compose -f docker-compose-no-nrf.yaml down > ../archives/cn5gwithnoNRF/compose_normal_down.log 2>&1'
}
}
}
}
stage ('gnbsim tutorial') {
steps {
script {
echo '\u2705 \u001B[32mDeploy CN5G with NRF and gnbsim using Docker-Compose\u001B[0m'
dir('docker-compose') {
sh 'docker-compose -f docker-compose.yaml up -d > ../archives/gnbsim/compose_5gcn_up.log 2>&1'
sh 'sleep 100'
// Do a check on number of healthy containers
// 5 == mysql + nrf + amf + smf + upf(spgwu-tiny)
ret = sh returnStdout: true, script: 'docker-compose ps -a | grep -v unhealthy | grep -c healthy || true'
ret = ret.trim()
if (ret != '5') {
error "Deployment went wrong!"
}
sh 'docker-compose -f docker-compose-gnbsim.yaml up -d gnbsim > ../archives/gnbsim/gnbsim_up.log 2>&1'
sh 'sleep 20'
// Do a check if gnbsim is healthy
ret = sh returnStdout: true, script: 'docker-compose -f docker-compose-gnbsim.yaml ps -a | grep -v unhealthy | grep -c healthy || true'
ret = ret.trim()
if (ret != '1') {
error "gnbsim deployment went wrong"
}
}
}
}
post {
always {
script {
sh 'docker logs oai-nrf > archives/gnbsim/oai_nrf.log 2>&1 || true'
sh 'docker logs oai-amf > archives/gnbsim/oai_amf.log 2>&1 || true'
sh 'docker logs oai-smf > archives/gnbsim/oai_smf.log 2>&1 || true'
sh 'docker logs oai-spgwu > archives/gnbsim/oai_spgwu.log 2>&1 || true'
sh 'docker logs gnbsim > archives/gnbsim/gnbsim.log 2>&1 || true'
}
}
success {
script {
sh 'echo "DEPLOYMENT: OK"'
}
}
unsuccessful {
script {
dir('docker-compose') {
sh 'docker-compose -f docker-compose-gnbsim.yaml down || true'
sh 'docker-compose down || true'
}
sh 'echo "DEPLOYMENT: KO"'
}
}
}
}
stage ('Un-deploy gnbsim tutorial') {
steps {
script {
dir('docker-compose') {
sh 'docker-compose -f docker-compose-gnbsim.yaml down > ../archives/gnbsim/gnbsim_down.log 2>&1'
sh 'docker-compose down >> ../archives/gnbsim/cn5g_down.log 2>&1'
}
}
}
}
}
post {
always {
script {
// Remove any leftover containers/networks
dir('docker-compose') {
sh 'docker-compose -f docker-compose-gnbsim.yaml down || true'
sh 'docker-compose down || true'
}
// Zipping all archived log files
sh "zip -r -qq cn5g_deploy_docker_logs.zip archives"
if (fileExists('cn5g_deploy_docker_logs.zip')) {
archiveArtifacts artifacts: 'cn5g_deploy_docker_logs.zip'
}
}
}
}
}
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
//-------------------------------------------------------------------------------
// Abstraction function to send social media messages:
// like on Slack or Mattermost
def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) {
if (params.pipelineUsesSlack != null) {
if (params.pipelineUsesSlack) {
slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage
}
}
}
// Location of the CN executor node
def cn_ci_host = params.Host_CN_CI_Server
// for lock
def cn_ci_resource = params.DockerContainers
def ds_tester_ci_resource = params.DsTester
// When triggered by upstream, specify which tag to use
def upstreamTagToUse = params.upstreamTagToUse
// Location of the CN tester
def dsT_host_flag = false
def dsT_host = ""
def dsT_host_user = ""
// dsTester tag to use
def dsTesterTag = params.DSTESTER_TAG
// Flags
def scmEvent = false
def upstreamEvent = false
def deployed = true
// Default tags / branches --> could be passed on by upstream job or by PR content
def nrfTag = params.nrfTag
def nrfBranch = params.nrfBranch
def amfTag = params.amfTag
def amfBranch = params.amfBranch
def smfTag = params.smfTag
def smfBranch = params.smfBranch
def spgwuTag = params.spgwuTag
def spgwuBranch = params.spgwuBranch
//-------------------------------------------------------------------------------
// Pipeline start
pipeline {
agent {
label cn_ci_host
}
options {
disableConcurrentBuilds()
timestamps()
ansiColor('xterm')
lock(cn_ci_resource)
}
stages {
stage ('Verify Parameters') {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
echo "Node is ${NODE_NAME}"
if (params.DS_Tester_Server_Flag != null) {
dsT_host_flag = params.DS_Tester_Server_Flag
if (dsT_host_flag) {
def allParametersPresent = true
if (params.DS_Tester_Server_Name == null) {
allParametersPresent = false
} else {
dsT_host = params.DS_Tester_Server_Name
}
if (params.DS_Tester_Server_Login == null) {
allParametersPresent = false
} else {
dsT_host_user = params.DS_Tester_Server_Login
}
if (allParametersPresent) {
echo "DS Tester is on ${dsT_host}"
} else {
echo "Some DS Tester parameters are missing!"
sh "./ci-scripts/fail.sh"
}
}
}
// Find out the cause of the trigger
for (cause in currentBuild.getBuildCauses()) {
if (cause.toString() ==~ /.*UpstreamCause.*/) {
upstreamEvent = true
//} else {
// scmEvent = true
}
}
if (upstreamEvent) {
if (params.NRF_TAG != null) {
nrfTag = params.NRF_TAG
echo "Upstream Job passed NRF_TAG to use: ${nrfTag}"
}
if (params.NRF_BRANCH != null) {
nrfBranch = params.NRF_BRANCH
echo "Upstream Job passed NRF_BRANCH to use: ${nrfBranch}"
}
if (params.AMF_TAG != null) {
amfTag = params.AMF_TAG
echo "Upstream Job passed AMF_TAG to use: ${amfTag}"
}
if (params.AMF_BRANCH != null) {
amfBranch = params.AMF_BRANCH
echo "Upstream Job passed AMF_BRANCH to use: ${amfBranch}"
}
if (params.SMF_TAG != null) {
smfTag = params.SMF_TAG
echo "Upstream Job passed SMF_TAG to use: ${smfTag}"
}
if (params.SMF_BRANCH != null) {
smfBranch = params.SMF_BRANCH
echo "Upstream Job passed SMF_BRANCH to use: ${smfBranch}"
}
if (params.SPGWU_TAG != null) {
spgwuTag = params.SPGWU_TAG
echo "Upstream Job passed SPGWU_TAG to use: ${spgwuTag}"
}
if (params.SPGWU_BRANCH != null) {
spgwuBranch = params.SPGWU_BRANCH
echo "Upstream Job passed SPGWU_BRANCH to use: ${spgwuBranch}"
}
sh "git clean -x -d -f > /dev/null 2>&1"
sh "git fetch --prune > /dev/null 2>&1"
sh 'git checkout -f ' + upstreamTagToUse
sh "zip -r -qq oai-cn5g-fed.zip .git"
sh "mkdir -p archives DS-TEST-RESULTS"
sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch
}
if (scmEvent) {
sh "git clean -x -d -f > /dev/null 2>&1"
if ("MERGE".equals(env.gitlabActionType)) {
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
}
sh "zip -r -qq oai-cn5g-fed.zip .git"
sh "mkdir -p archives DS-TEST-RESULTS"
sh './scripts/syncComponents.sh --nrf-branch develop --amf-branch develop --smf-branch develop --spgwu-tiny-branch develop'
}
if ((!upstreamEvent) && (!scmEvent)) {
sh "git clean -x -d -f > /dev/null 2>&1"
sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch
sh "mkdir -p archives DS-TEST-RESULTS"
}
// Verify that the images are available
try {
sh 'echo "OAI_NRF_TAG: oai-nrf:' + nrfTag +'" > archives/oai_nrf_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log'
} catch (Exception e) {
error "OAI NRF Image tag to test does not exist!"
}
try {
sh 'echo "OAI_AMF_TAG: oai-amf:' + amfTag +'" > archives/oai_amf_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log'
} catch (Exception e) {
error "OAI AMF Image tag to test does not exist!"
}
try {
sh 'echo "OAI_SMF_TAG: oai-smf:' + smfTag +'" > archives/oai_smf_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log'
} catch (Exception e) {
error "OAI SMF Image tag to test does not exist!"
}
try {
sh 'echo "OAI_SPGWU_TAG: oai-spgwu-tiny:' + spgwuTag +'" > archives/oai_spgwu_image_info.log'
sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log'
sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log'
} catch (Exception e) {
error "OAI SPGW-U-Tiny Image tag to test does not exist!"
}
}
}
}
stage ('Deploy 5G Core Network and Test with DS Tester') {
when { expression {dsT_host_flag} }
steps {
lock (ds_tester_ci_resource) {
script {
echo '\u2705 \u001B[32mDeploy CN5G using Docker-Compose and Testing with DS Tester\u001B[0m'
if (fileExists("dstester")) {
sh "rm -Rf dstester > /dev/null 2>&1"
}
sh "mkdir -p dstester"
dir ('dstester') {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.dsTesterGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_token']
]) {
sh "git clone https://${git_username}:${git_token}@github.com/OPENAIRINTERFACE/chasseur.git . > ../git_clone.log 2>&1"
sh "git checkout -f " + dsTesterTag + " >> ../git_clone.log 2>&1"
// First put all correct tags to test
sh 'sed -i -e "s#NRF_TAG#' + nrfTag + '#" -i -e "s#AMF_TAG#' + amfTag + '#" -i -e "s#SMF_TAG#' + smfTag + '#" -i -e "s#SPGWU_TAG#' + spgwuTag + '#" ./jenkins/suits/dc/integration/integration-mini.yaml'
// Check the route between the container and the DSTester network, and add it
sh 'python3 ../ci-scripts/routeCheck.py --mode Add --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-mini/integration.tplt'
dir ('jenkins') {
try {
sh "python3 ./chasseurdocker.py -f ./suits/dc/integration/integration-mini.yaml | tee ../../DS-TEST-RESULTS/dsTester_Summary.txt"
} catch (Exception e) {
currentBuild.result = 'FAILURE'
echo "dsTester FrameWork FAILED"
}
}
sh 'python3 ../ci-scripts/routeCheck.py --mode Delete --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-mini/integration.tplt'
}
}
sh "python3 ./ci-scripts/toCheckDSTesterResult.py"
}
}
}
}
}
post {
always {
script {
// Generating the HTML report
sh 'python3 ./ci-scripts/dsTestGenerateHTMLReport2.py --job_name ' + JOB_NAME + ' --job_id ' + BUILD_ID + ' --job_url ' + BUILD_URL
// Zipping all archived log files
sh "zip -r -qq cn5g_fed_docker_logs.zip archives DS-TEST-RESULTS"
if (fileExists('cn5g_fed_docker_logs.zip')) {
archiveArtifacts artifacts: 'cn5g_fed_docker_logs.zip'
}
if (fileExists('test_results_oai_cn5g_mini.html')) {
archiveArtifacts artifacts: 'test_results_oai_cn5g_mini.html'
}
}
}
}
}
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
//-------------------------------------------------------------------------------
// Abstraction function to send social media messages:
// like on Slack or Mattermost
def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) {
if (params.pipelineUsesSlack != null) {
if (params.pipelineUsesSlack) {
slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage
}
}
}
// Location of the CN executor node
def cn_ci_host = params.Host_CN_CI_Server
// for lock
def cn_ci_resource = params.CN_CI_Resource
def ds_tester_ci_resource = params.DsTester
// When triggered by upstream, specify which tag to use
def upstreamTagToUse = params.upstreamTagToUse
// Location of the CN tester
def dsT_host_flag = false
def dsT_host = ""
def dsT_host_user = ""
def dsT_host_ip_addr = ""
// dsTester tag to use
def dsTesterTag = params.DSTESTER_TAG
// Flags
def scmEvent = false
def upstreamEvent = false
// Default tags --> could be passed on by upstream job or by PR content
def nrfTag = params.nrfTag
def amfTag = params.amfTag
def smfTag = params.smfTag
def spgwuTag = params.spgwuTag
def udrTag = params.udrTag
def udmTag = params.udmTag
def ausfTag = params.ausfTag
//-------------------------------------------------------------------------------
// Pipeline start
pipeline {
agent {
label cn_ci_host
}
options {
disableConcurrentBuilds()
timestamps()
ansiColor('xterm')
lock(cn_ci_resource)
}
stages {
stage ('Verify Parameters') {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
def allParametersPresent = true
if (params.OC_Credentials == null) {
allParametersPresent = false
}
if (params.OC_ProjectName == null) {
allParametersPresent = false
}
if (allParametersPresent) {
echo "Cluster Access parameters are present"
} else {
echo "Some Cluster Access parameters are missing"
sh "./ci-scripts/fail.sh"
}
if (params.DS_Tester_Server_Flag != null) {
dsT_host_flag = params.DS_Tester_Server_Flag
if (dsT_host_flag) {
allParametersPresent = true
if (params.DS_Tester_Server_Name == null) {
allParametersPresent = false
} else {
dsT_host = params.DS_Tester_Server_Name
}
if (params.DS_Tester_Server_Login == null) {
allParametersPresent = false
} else {
dsT_host_user = params.DS_Tester_Server_Login
}
if (params.DS_Tester_Server_IP_Addr == null) {
allParametersPresent = false
} else {
dsT_host_ip_addr = params.DS_Tester_Server_IP_Addr
}
if (params.dsTesterGitLabRepository_Credentials == null) {
allParametersPresent = false
}
if (allParametersPresent) {
echo "DS Tester is on ${dsT_host}"
} else {
echo "Some DS Tester parameters are missing!"
sh "./ci-scripts/fail.sh"
}
}
}
// Clean workspace and prepare artifacts location
sh "git clean -x -d -f > /dev/null 2>&1"
sh "mkdir -p archives DS-TEST-RESULTS"
// Find out the cause of the trigger
for (cause in currentBuild.getBuildCauses()) {
if (cause.toString() ==~ /.*UpstreamCause.*/) {
upstreamEvent = true
}
}
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password']
]) {
if (upstreamEvent) {
if (params.NRF_TAG != null) {
nrfTag = params.NRF_TAG
echo "Upstream Job passed NRF_TAG to use: ${nrfTag}"
}
if (params.AMF_TAG != null) {
amfTag = params.AMF_TAG
echo "Upstream Job passed AMF_TAG to use: ${amfTag}"
}
if (params.SMF_TAG != null) {
smfTag = params.SMF_TAG
echo "Upstream Job passed SMF_TAG to use: ${smfTag}"
}
if (params.SPGWU_TAG != null) {
spgwuTag = params.SPGWU_TAG
echo "Upstream Job passed SPGWU_TAG to use: ${spgwuTag}"
}
if (params.UDR_TAG != null) {
udrTag = params.UDR_TAG
echo "Upstream Job passed UDR_TAG to use: ${udrTag}"
}
if (params.UDM_TAG != null) {
udmTag = params.UDM_TAG
echo "Upstream Job passed UDM_TAG to use: ${udmTag}"
}
if (params.AUSF_TAG != null) {
ausfTag = params.AUSF_TAG
echo "Upstream Job passed AUSF_TAG to use: ${ausfTag}"
}
sh "git clean -x -d -f > /dev/null 2>&1"
sh "git fetch --prune > /dev/null 2>&1"
sh 'git checkout -f ' + upstreamTagToUse
sh 'mkdir -p archives DS-TEST-RESULTS'
}
imageTags = "mysql:8.0,oai-nrf:${nrfTag},oai-udr:${udrTag},oai-udm:${udmTag},oai-ausf:${ausfTag},oai-amf:${amfTag},oai-smf:${smfTag},oai-spgwu-tiny:${spgwuTag}"
}
}
}
}
stage ('Deploy Whole 5G Core Network') {
steps {
script {
echo '\u2705 \u001B[32mDeploy CN5G on Cluster\u001B[0m'
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password']
]) {
sh "python3 ci-scripts/helmDeploy.py --mode=Deploy --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName} --imageTags=${imageTags}"
}
}
}
}
stage ('Check with DS Tester') {
when { expression {dsT_host_flag} }
steps {
lock (ds_tester_ci_resource) {
script {
echo '\u2705 \u001B[32mTesting with DS Tester\u001B[0m'
if (fileExists("dstester")) {
sh "rm -Rf dstester > /dev/null 2>&1"
}
sh "mkdir -p dstester"
dir ('dstester') {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.dsTesterGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_token']
]) {
sh "git clone https://${git_username}:${git_token}@github.com/OPENAIRINTERFACE/chasseur.git . > ../git_clone.log 2>&1"
sh "git checkout -f " + dsTesterTag + " >> ../git_clone.log 2>&1"
dir ('jenkins') {
try {
sh "python3 ./dogmatix-agent.py -f ./suits/hc/integration.yaml -d true | tee ../../DS-TEST-RESULTS/dsTester_Summary.txt"
} catch (Exception e) {
currentBuild.result = 'FAILURE'
echo "dsTester Running FAILED"
}
}
}
}
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password']
]) {
try {
sh "python3 ci-scripts/helmDeploy.py --mode=GetLogs --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName} --imageTags=${imageTags}"
} catch (Exception e) {
echo "Unable to collect the logs, configs, pcaps"
}
}
sh "python3 ./ci-scripts/toCheckDSTesterResult1.py"
}
}
}
}
stage ('Undeploy 5G-CN') {
steps {
script {
echo '\u2705 \u001B[32mUnDeploy CN5G on Cluster\u001B[0m'
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password']
]) {
sh "python3 ci-scripts/helmDeploy.py --mode=UnDeploy --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName} --imageTags=${imageTags}"
}
}
}
}
}
post {
always {
script {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password']
]) {
// Remove any leftover containers/networks
sh "python3 ci-scripts/helmDeploy.py --mode=UnDeploy --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName} --imageTags=${imageTags}"
}
// Generating the HTML report
sh 'python3 ./ci-scripts/dsTestGenerateHTMLReport1.py --job_name=' + JOB_NAME + ' --job_id=' + BUILD_ID + ' --job_url=' + BUILD_URL
// Zipping all archived log files
sh "zip -r -qq cn5g_fed_cluster_logs.zip archives DS-TEST-RESULTS"
sh "rm -rf archives DS-TEST-RESULTS"
if (fileExists('cn5g_fed_cluster_logs.zip')) {
archiveArtifacts artifacts: 'cn5g_fed_cluster_logs.zip'
}
if (fileExists('test_results_oai_cn5g_oc.html')) {
archiveArtifacts artifacts: 'test_results_oai_cn5g_oc.html'
}
}
}
}
}
@@ -101,12 +101,15 @@ pipeline {
upstreamJobs[UDR] = true
}
}
sh "git clean -x -d -f > /dev/null 2>&1"
sh "mkdir -p archives"
sh "git clean -x -d -ff > /dev/null 2>&1"
sh "git submodule foreach --recursive 'git clean -x -d -ff' > /dev/null 2>&1"
sh "git submodule deinit --force --all > /dev/null 2>&1"
// For any upstream job (PR or post-merge), run on the certified tag.
if (upstreamEvent) {
sh 'git checkout -f ' + upstreamTagToUse
}
sh 'git submodule update --init --recursive ci-scripts/common'
sh "mkdir -p archives"
for (ii = 0; ii < imageNames.size(); ii++) {
if (ii == NRF) {
......
@@ -96,12 +96,15 @@ pipeline {
upstreamJobs[UDR] = true
}
}
sh "git clean -x -d -f > /dev/null 2>&1"
sh "mkdir -p archives"
sh "git clean -x -d -ff > /dev/null 2>&1"
sh "git submodule foreach --recursive 'git clean -x -d -ff' > /dev/null 2>&1"
sh "git submodule deinit --force --all > /dev/null 2>&1"
// For any upstream job (PR or post-merge), run on the certified tag.
if (upstreamEvent) {
sh 'git checkout -f ' + upstreamTagToUse
}
sh 'git submodule update --init --recursive ci-scripts/common'
sh "mkdir -p archives"
for (ii = 0; ii < imageNames.size(); ii++) {
if (ii == NRF) {
......
@@ -116,11 +116,14 @@ pipeline {
}
}
sh "git clean -x -d -f > /dev/null 2>&1"
sh "mkdir -p archives"
sh "git submodule foreach --recursive 'git clean -x -d -ff' > /dev/null 2>&1"
sh "git submodule deinit --force --all > /dev/null 2>&1"
// For any upstream job (PR or post-merge), run on the certified tag.
if (upstreamEvent) {
sh 'git checkout -f ' + upstreamTagToUse
}
sh 'git submodule update --init --recursive ci-scripts/common'
sh "mkdir -p archives"
for (ii = 0; ii < imageNames.size(); ii++) {
if (ii == NRF) {
@@ -296,7 +299,7 @@ pipeline {
dir ('ci-scripts') {
// Tee will make the command always pass
// Please use the same log name as the folder used in the tutorial
sh './checkTutorial.py --tutorial DEPLOY_SA5G_WITH_GNBSIM.md | tee ../archives/mini-gnbsim.log'
sh './checkTutorial.py --tutorial DEPLOY_SA5G_MINI_WITH_GNBSIM.md | tee ../archives/mini-gnbsim.log'
}
// Checking if FAIL appears
try {
......
@@ -14,6 +14,8 @@ oai-nrf:
version: NRF_TAG ## The branch to be used to pull from dockerhub
# pullPolicy: IfNotPresent or Never or Always
pullPolicy: IfNotPresent
config:
logLevel: "debug"
nodeSelector: {}
oai-udr:
enabled: true
@@ -24,6 +26,8 @@ oai-udr:
version: UDR_TAG ## The branch to be used to pull from dockerhub
# pullPolicy: IfNotPresent or Never or Always
pullPolicy: IfNotPresent
config:
logLevel: "debug"
nodeSelector: {}
oai-udm:
enabled: true
@@ -34,6 +38,8 @@ oai-udm:
version: UDM_TAG ## The branch to be used to pull from dockerhub
# pullPolicy: IfNotPresent or Never or Always
pullPolicy: IfNotPresent
config:
logLevel: "debug"
nodeSelector: {}
oai-ausf:
enabled: true
@@ -44,6 +50,8 @@ oai-ausf:
version: AUSF_TAG ## The branch to be used to pull from dockerhub
# pullPolicy: IfNotPresent or Never or Always
pullPolicy: IfNotPresent
config:
logLevel: "debug"
nodeSelector: {}
oai-amf:
enabled: true
@@ -78,6 +86,7 @@ oai-amf:
useHttp2: "no"
intAlgoList: '[ "NIA1" , "NIA2" ]'
ciphAlgoList: '[ "NEA0" , "NEA1" , "NEA2" ]'
logLevel: "debug"
nodeSelector: {} #Using dedicated nodeName in the chart directly
oai-spgwu-tiny:
enabled: true
@@ -116,6 +125,7 @@ oai-spgwu-tiny:
nssaiSst1: 1 # should match with SMF information (Optional, if removed slice will not be configured)
nssaiSd1: "0xFFFFFF" # should match with SMF information (Optional, if removed sd value will be 0xFFFFFF only if nssaiSst1 is configured)
dnn1: "ims" # should match with SMF information
logLevel: "debug"
nodeSelector: {}
oai-traffic-server:
enabled: true
@@ -170,4 +180,5 @@ oai-smf:
qosProfile5qi1: 1
sessionAmbrUl1: "1000Mbps"
sessionAmbrDl1: "1000Mbps"
logLevel: "debug"
nodeSelector: {}
@@ -91,7 +91,7 @@ config:
mySqlUser: "root" # OPTIONAL: used only if not using AUSF
mySqlPass: "linux" # OPTIONAL: used only if not using AUSF
mySqlDb: "oai_db" # OPTIONAL: used only if not using AUSF
logLevel: "info"
logLevel: "debug"
persistent:
sharedvolume: true
......
@@ -48,7 +48,7 @@ config:
tz: "Europe/Paris"
instanceId: "0"
pidDirectory: "/var/run"
logLevel: "info"
logLevel: "debug"
ausfName: "OAI_AUSF"
sbiIfName: "eth0"
sbiPortHttp1: "80"
......
@@ -52,7 +52,7 @@ config:
nrfInterfacePortForSBI: "80"
nrfInterfaceHttp2PortForSBI: "8080"
nrfApiVersion: "v1"
logLevel: "info"
logLevel: "debug"
persistent:
sharedvolume: true
......
@@ -98,7 +98,7 @@ config:
qosProfile5qi1: 1
sessionAmbrUl1: "1000Mbps"
sessionAmbrDl1: "1000Mbps"
logLevel: "info"
logLevel: "debug"
## currently only used by tcpdump container to store the tcpdump, this volume will be shared between all the network functions (If true it should be true for all the NFs specially NRF)
persistent:
......
@@ -76,7 +76,7 @@ config:
nssaiSst1: 1 # should match with SMF information (Optional, if removed slice will not be configured)
nssaiSd1: "0xFFFFFF" # should match with SMF information (Optional, if removed sd value will be 0xFFFFFF only if nssaiSst1 is configured)
dnn1: "ims" # should match with SMF information
logLevel: "info"
logLevel: "debug"
## currently only used by tcpdump container to store the tcpdump, this volume will be shared between all the network functions
persistent:
......
@@ -45,7 +45,7 @@ config:
tz: "Europe/Paris"
instance: 0
pidDirectory: "/var/run"
logLevel: "info"
logLevel: "debug"
udmName: "oai-udm"
sbiIfName: "eth0"
sbiPortHttp1: "80"
......
@@ -52,7 +52,7 @@ config:
instance: "0"
udrname: "oai-udr"
pidDirectory: "/var/run"
logLevel: "info"
logLevel: "debug"
sbiIfName: "eth0"
sbiPortHttp1: "80"
sbiPortHttp2: "8080"
......
@@ -26,9 +26,8 @@ import logging
import os
import re
import sys
import subprocess
from generate_html import (
from common.python.generate_html import (
generate_header,
generate_footer,
generate_chapter,
......
@@ -26,7 +26,7 @@ import logging
import re
import sys
import time
import cls_cmd
import common.python.cls_cmd as cls_cmd
logging.basicConfig(
level=logging.DEBUG,
......
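For context, the two Python hunks above replace flat local imports with imports routed through the new ci-scripts/common submodule. Below is a minimal sketch of the assumed checkout layout and the resulting import pattern; the directory structure under the submodule is inferred from the import paths and is an assumption, not something this commit shows.

# Minimal sketch, assuming the CI scripts are executed with ci-scripts/ as the
# working directory and that the submodule provides a python/ sub-package:
#
#   ci-scripts/
#       checkTutorial.py            (and the other CI scripts)
#       common/                     <- submodule oai-cn5g-common-ci
#           python/
#               generate_html.py
#               cls_cmd.py
#
# With that layout, `common` resolves as a (namespace) package from ci-scripts/,
# so the shared helpers are imported exactly as in the hunks above:

import common.python.cls_cmd as cls_cmd
from common.python.generate_html import (
    generate_header,
    generate_footer,
    generate_chapter,
)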