diff --git a/.gitmodules b/.gitmodules index b842ba8c294fa590eb14af45345d99bea6df5738..515fe5069bf7e28694f77c4297da4a7ecf014035 100644 --- a/.gitmodules +++ b/.gitmodules @@ -31,3 +31,6 @@ [submodule "component/oai-pcf"] path = component/oai-pcf url = https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-pcf.git +[submodule "ci-scripts/common"] + path = ci-scripts/common + url = https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-common-ci.git diff --git a/ci-scripts/Jenkinsfile-GitLab-COTS-UE-Test b/ci-scripts/Jenkinsfile-GitLab-COTS-UE-Test index 5edd971da598084732a3eded8b2d641421fac970..810df254bf2509b9d42d77ce2b757d29aa8463e2 100644 --- a/ci-scripts/Jenkinsfile-GitLab-COTS-UE-Test +++ b/ci-scripts/Jenkinsfile-GitLab-COTS-UE-Test @@ -87,7 +87,10 @@ pipeline { steps { script { echo '\u2705 \u001B[32mVerify Parameters\u001B[0m' - sh "git clean -x -d -f > /dev/null 2>&1" + sh "git clean -x -d -ff > /dev/null 2>&1" + sh "git submodule foreach --recursive 'git clean -x -d -ff' > /dev/null 2>&1" + sh "git submodule deinit --force --all > /dev/null 2>&1" + sh "git submodule update --init --recursive ci-scripts/common" sh "mkdir -p archives" for (ii = 0; ii < imageNames.size(); ii++) { if (ii == NRF) { @@ -546,6 +549,14 @@ def retrieveLogsFromPods() { echo "Getting logs from ${podName} failed" } } + // Retrieving mysql logs + podName = sh returnStdout: true, script: "oc get pods | grep mysql | awk {'print \$1'} || true" + podName = podName.trim() + try { + sh "oc logs ${podName} &> archives/mysql.logs" + } catch (Exception e) { + echo "Getting logs from ${podName} failed" + } sh "oc describe pod &> archives/describe-pods-post-test.logs" sh "oc get pods.metrics.k8s.io &> archives/nf-resource-consumption.log" if (nrfPod.contains("oai-nrf")) { diff --git a/ci-scripts/Jenkinsfile-GitLab-Docker b/ci-scripts/Jenkinsfile-GitLab-Docker deleted file mode 100644 index 212e74139faf7747c458624e2c15629eea5dec6e..0000000000000000000000000000000000000000 --- a/ci-scripts/Jenkinsfile-GitLab-Docker +++ /dev/null @@ -1,470 +0,0 @@ -#!/bin/groovy -/* - * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The OpenAirInterface Software Alliance licenses this file to You under - * the OAI Public License, Version 1.1 (the "License"); you may not use this file - * except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.openairinterface.org/?page_id=698 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- *------------------------------------------------------------------------------- - * For more information about the OpenAirInterface (OAI) Software Alliance: - * contact@openairinterface.org - */ - -//------------------------------------------------------------------------------- -// Abstraction function to send social media messages: -// like on Slack or Mattermost -def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) { - if (params.pipelineUsesSlack != null) { - if (params.pipelineUsesSlack) { - slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage - } - } -} - -// Location of the CN executor node -def cn_ci_host = params.Host_CN_CI_Server - -// for lock -def cn_ci_resource = params.DockerContainers -def ds_tester_ci_resource = params.DsTester - -// When triggered by upstream, specify which tag to use -def upstreamTagToUse = params.upstreamTagToUse - -// Location of the CN tester -def dsT_host_flag = false -def dsT_host = "" -def dsT_host_user = "" - -// dsTester tag to use -def dsTesterTag = params.DSTESTER_TAG - -// Flags -def scmEvent = false -def upstreamEvent = false -def deployed = true -def mini = false -def basic = false -def slice = false - -// Default tags / branches --> could be passed on by upstream job or by PR content -def nrfTag = params.nrfTag -def nrfBranch = params.nrfBranch -def amfTag = params.amfTag -def amfBranch = params.amfBranch -def smfTag = params.smfTag -def smfBranch = params.smfBranch -def spgwuTag = params.spgwuTag -def spgwuBranch = params.spgwuBranch - -def ausfTag = params.ausfTag -def ausfBranch = params.ausfBranch -def udmTag = params.udmTag -def udmBranch = params.udmBranch -def udrTag = params.udrTag -def udrBranch = params.udrBranch -def nssfTag = params.nssfTag -def nssfBranch = params.nssfBranch - -def upfVppBranch = 'develop' - -//------------------------------------------------------------------------------- -// Pipeline start -pipeline { - agent { - label cn_ci_host - } - options { - disableConcurrentBuilds() - timestamps() - ansiColor('xterm') - lock(cn_ci_resource) - } - stages { - stage ('Verify Parameters') { - steps { - script { - echo '\u2705 \u001B[32mVerify Parameters\u001B[0m' - - JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"' - JOB_TIMESTAMP = JOB_TIMESTAMP.trim() - - echo "Node is ${NODE_NAME}" - - if (params.DS_Tester_Server_Flag != null) { - dsT_host_flag = params.DS_Tester_Server_Flag - if (dsT_host_flag) { - def allParametersPresent = true - if (params.DS_Tester_Server_Name == null) { - allParametersPresent = false - } else { - dsT_host = params.DS_Tester_Server_Name - } - if (params.DS_Tester_Server_Login == null) { - allParametersPresent = false - } else { - dsT_host_user = params.DS_Tester_Server_Login - } - if (allParametersPresent) { - echo "DS Tester is on ${dsT_host}" - } else { - echo "Some DS Tester parameters are missing!" 
- sh "./ci-scripts/fail.sh" - } - } - } - - // Find out the cause of the trigger - for (cause in currentBuild.getBuildCauses()) { - if (cause.toString() ==~ /.*UpstreamCause.*/) { - upstreamEvent = true - //} else { - // scmEvent = true - } - } - - if (upstreamEvent) { - if (params.NRF_TAG != null) { - nrfTag = params.NRF_TAG - echo "Upstream Job passed NRF_TAG to use: ${nrfTag}" - mini = true - basic = true - } - if (params.NRF_BRANCH != null) { - nrfBranch = params.NRF_BRANCH - echo "Upstream Job passed NRF_BRANCH to use: ${nrfBranch}" - } - if (params.AMF_TAG != null) { - amfTag = params.AMF_TAG - echo "Upstream Job passed AMF_TAG to use: ${amfTag}" - mini = true - basic = true - slice = true - } - if (params.AMF_BRANCH != null) { - amfBranch = params.AMF_BRANCH - echo "Upstream Job passed AMF_BRANCH to use: ${amfBranch}" - } - if (params.SMF_TAG != null) { - smfTag = params.SMF_TAG - echo "Upstream Job passed SMF_TAG to use: ${smfTag}" - mini = true - basic = true - } - if (params.SMF_BRANCH != null) { - smfBranch = params.SMF_BRANCH - echo "Upstream Job passed SMF_BRANCH to use: ${smfBranch}" - } - if (params.SPGWU_TAG != null) { - spgwuTag = params.SPGWU_TAG - echo "Upstream Job passed SPGWU_TAG to use: ${spgwuTag}" - mini = true - basic = true - } - if (params.SPGWU_BRANCH != null) { - spgwuBranch = params.SPGWU_BRANCH - echo "Upstream Job passed SPGWU_BRANCH to use: ${spgwuBranch}" - } - if (params.AUSF_TAG != null) { - ausfTag = params.AUSF_TAG - echo "Upstream Job passed AUSF_TAG to use: ${ausfTag}" - basic = true - } - if (params.AUSF_BRANCH != null) { - ausfBranch = params.AUSF_BRANCH - echo "Upstream Job passed AUSF_BRANCH to use: ${ausfBranch}" - } - if (params.UDM_TAG != null) { - udmTag = params.UDM_TAG - echo "Upstream Job passed UDM_TAG to use: ${udmTag}" - basic = true - } - if (params.UDM_BRANCH != null) { - udmBranch = params.UDM_BRANCH - echo "Upstream Job passed UDM_BRANCH to use: ${udmBranch}" - } - if (params.UDR_TAG != null) { - udrTag = params.UDR_TAG - echo "Upstream Job passed UDR_TAG to use: ${udrTag}" - basic = true - } - if (params.UDR_BRANCH != null) { - udrBranch = params.UDR_BRANCH - echo "Upstream Job passed UDR_BRANCH to use: ${udrBranch}" - } - if (params.NSSF_TAG != null) { - nssfTag = params.NSSF_TAG - echo "Upstream Job passed NSSF_TAG to use: ${nssfTag}" - slice = true - } - if (params.NSSF_BRANCH != null) { - nssfBranch = params.NSSF_BRANCH - echo "Upstream Job passed NSSF_BRANCH to use: ${nssfBranch}" - } - sh "git clean -x -d -f > /dev/null 2>&1" - sh "git fetch --prune > /dev/null 2>&1" - sh 'git checkout -f ' + upstreamTagToUse - sh "zip -r -qq oai-cn5g-fed.zip .git" - sh "mkdir -p archives" - sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch + ' --ausf-branch ' + ausfBranch + ' --udm-branch ' + udmBranch + ' --udr-branch ' + udrBranch + ' --upf-vpp-branch ' + upfVppBranch + ' --nssf-branch ' + nssfBranch - } - if (scmEvent) { - mini = true - basic = true - slice = true - sh "git clean -x -d -f > /dev/null 2>&1" - if ("MERGE".equals(env.gitlabActionType)) { - sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}" - } - sh "zip -r -qq oai-cn5g-fed.zip .git" - sh "mkdir -p archives" - sh './scripts/syncComponents.sh --nrf-branch develop --amf-branch develop --smf-branch develop --spgwu-tiny-branch 
develop --ausf-branch develop --udm-branch develop --udr-branch develop --upf-vpp-branch develop --nssf-branch develop' - } - if ((!upstreamEvent) && (!scmEvent)) { - mini = true - basic = true - slice = true - sh "git clean -x -d -f > /dev/null 2>&1" - sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch + ' --ausf-branch ' + ausfBranch + ' --udm-branch ' + udmBranch + ' --udr-branch ' + udrBranch + ' --upf-vpp-branch ' + upfVppBranch + ' --nssf-branch ' + nssfBranch - sh "mkdir -p archives" - } - // Verify that the images are available - try { - sh 'echo "OAI_NRF_TAG: oai-nrf:' + nrfTag +'" > archives/oai_nrf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log' - } catch (Exception e) { - error "OAI NRF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_AMF_TAG: oai-amf:' + amfTag +'" > archives/oai_amf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log' - } catch (Exception e) { - error "OAI AMF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_SMF_TAG: oai-smf:' + smfTag +'" > archives/oai_smf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log' - } catch (Exception e) { - error "OAI SMF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_SPGWU_TAG: oai-spgwu-tiny:' + spgwuTag +'" > archives/oai_spgwu_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log' - } catch (Exception e) { - error "OAI SPGW-U-Tiny Image tag to test does not exist!" - } - try { - sh 'echo "OAI_AUSF_TAG: oai-ausf:' + ausfTag +'" > archives/oai_ausf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-ausf:' + ausfTag + ' >> archives/oai_ausf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-ausf:' + ausfTag + ' >> archives/oai_ausf_image_info.log' - } catch (Exception e) { - error "OAI AUSF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_UDM_TAG: oai-udm:' + udmTag +'" > archives/oai_udm_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-udm:' + udmTag + ' >> archives/oai_udm_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-udm:' + udmTag + ' >> archives/oai_udm_image_info.log' - } catch (Exception e) { - error "OAI UDM Image tag to test does not exist!" 
- } - try { - sh 'echo "OAI_UDR_TAG: oai-udr:' + udrTag +'" > archives/oai_udr_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-udr:' + udrTag + ' >> archives/oai_udr_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-udr:' + udrTag + ' >> archives/oai_udr_image_info.log' - } catch (Exception e) { - error "OAI UDR Image tag to test does not exist!" - } - try { - sh 'echo "OAI_NSSF_TAG: oai-nssf:' + nssfTag +'" > archives/oai_nssf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-nssf:' + nssfTag + ' >> archives/oai_nssf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-nssf:' + nssfTag + ' >> archives/oai_nssf_image_info.log' - } catch (Exception e) { - error "OAI NSSF Image tag to test does not exist!" - } - } - } - } - stage ('Deploy 5G Core Network (mini) and Test with DS Tester') { - when { - allOf { - expression {dsT_host_flag} - expression {mini == true} - } - } - steps { - lock (ds_tester_ci_resource) { - script { - echo '\u2705 \u001B[32mDeploy CN5G-Minimal using Docker-Compose and Testing with DS Tester\u001B[0m' - sh "mkdir -p RESULTS-MINI" - if (fileExists("dstester")) { - sh "rm -Rf dstester > /dev/null 2>&1" - } - sh "mkdir -p dstester" - dir ('dstester') { - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.dsTesterGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_token'] - ]) { - sh "git clone https://${git_username}:${git_token}@github.com/OPENAIRINTERFACE/chasseur.git . > ../git_clone.log 2>&1" - sh "git checkout -f " + dsTesterTag + " >> ../git_clone.log 2>&1" - // First put all correct tags to test - sh 'sed -i -e "s#NRF_TAG#' + nrfTag + '#" -i -e "s#AMF_TAG#' + amfTag + '#" -i -e "s#SMF_TAG#' + smfTag + '#" -i -e "s#SPGWU_TAG#' + spgwuTag + '#" ./jenkins/suits/dc/integration/integration-mini.yaml' - // Check the route b/w the container and DSTester n/w and add - sh 'python3 ../ci-scripts/routeCheck.py --mode Add --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-mini/integration.tplt' - dir ('jenkins') { - try { - sh "python3 ./chasseurdocker.py -f ./suits/dc/integration/integration-mini.yaml | tee ../../RESULTS-MINI/dsTester_Summary_mini.txt" - } catch (Exception e) { - currentBuild.result = 'FAILURE' - echo "dsTester FrameWork FAILED" - } - } - sh 'python3 ../ci-scripts/routeCheck.py --mode Delete --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-mini/integration.tplt' - } - } - try { - sh "python3 ./ci-scripts/toCheckDSTesterResult.py --type mini" - } catch (Exception e) { - currentBuild.result = 'FAILURE' - echo "dsTester Mini ResultCheck FAILED" - } - } - } - } - } - stage ('Deploy Whole 5G Core Network (Basic) and Test with DS Tester') { - when { - allOf { - expression {dsT_host_flag} - expression {basic == true} - } - } - steps { - lock (ds_tester_ci_resource) { - script { - echo '\u2705 \u001B[32mDeploy CN5G-Basic using Docker-Compose and Testing with DS Tester\u001B[0m' - sh "mkdir -p RESULTS-BASIC" - if (fileExists("dstester")) { - sh "rm -Rf dstester > /dev/null 2>&1" - } - sh "mkdir -p dstester" - dir ('dstester') { - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.dsTesterGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_token'] - ]) { - sh "git clone 
https://${git_username}:${git_token}@github.com/OPENAIRINTERFACE/chasseur.git . > ../git_clone.log 2>&1" - sh "git checkout -f " + dsTesterTag + " >> ../git_clone.log 2>&1" - // First put all correct tags to test - sh 'sed -i -e "s#NRF_TAG#' + nrfTag + '#" -i -e "s#AMF_TAG#' + amfTag + '#" -i -e "s#SMF_TAG#' + smfTag + '#" -i -e "s#UPF_TAG#' + spgwuTag + '#" -i -e "s#AUSF_TAG#' + ausfTag + '#" -i -e "s#UDM_TAG#' + udmTag + '#" -i -e "s#UDR_TAG#' + udrTag + '#" ./jenkins/suits/dc/integration/integration-basic.yaml' - // Check the route b/w the container and DSTester n/w and add - sh 'python3 ../ci-scripts/routeCheck.py --mode Add --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-basic/integration.tplt' - dir ('jenkins') { - try { - sh "python3 ./chasseurdocker.py -f ./suits/dc/integration/integration-basic.yaml | tee ../../RESULTS-BASIC/dsTester_Summary_basic.txt" - } catch (Exception e) { - currentBuild.result = 'FAILURE' - echo "dsTester FrameWork FAILED" - } - } - sh 'python3 ../ci-scripts/routeCheck.py --mode Delete --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-basic/integration.tplt' - } - } - try{ - sh "python3 ./ci-scripts/toCheckDSTesterResult.py --type basic" - } catch (Exception e) { - currentBuild.result = 'FAILURE' - echo "dsTester Basic ResultCheck FAILED" - } - } - } - } - } - stage ('Deploy 5G Core Network (slice) and Test with DS Tester') { - when { - allOf { - expression {dsT_host_flag} - expression {slice == true} - } - } - steps { - lock (ds_tester_ci_resource) { - script { - echo '\u2705 \u001B[32mDeploy CN5G-Slicing using Docker-Compose and Testing with DS Tester\u001B[0m' - sh "mkdir -p RESULTS-SLICE" - if (fileExists("dstester")) { - sh "rm -Rf dstester > /dev/null 2>&1" - } - sh "mkdir -p dstester" - dir ('dstester') { - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.dsTesterGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_token'] - ]) { - sh "git clone https://${git_username}:${git_token}@github.com/OPENAIRINTERFACE/chasseur.git . 
> ../git_clone.log 2>&1" - sh "git checkout -f " + dsTesterTag + " >> ../git_clone.log 2>&1" - // First put all correct tags to test - sh 'sed -i -e "s#NRF_TAG#' + nrfTag + '#" -i -e "s#AMF_TAG#' + amfTag + '#" -i -e "s#SMF_TAG#' + smfTag + '#" -i -e "s#UPF_TAG#' + spgwuTag + '#" -i -e "s#AUSF_TAG#' + ausfTag + '#" -i -e "s#UDM_TAG#' + udmTag + '#" -i -e "s#UDR_TAG#' + udrTag + '#" -i -e "s#NSSF_TAG#' + nssfTag + '#" ./jenkins/suits/dc/integration/integration-slice.yaml' - // Check the route b/w the container and DSTester n/w and add - sh 'python3 ../ci-scripts/routeCheck.py --mode Add --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-slice/integration.tplt' - dir ('jenkins') { - try { - sh "python3 ./chasseurdocker.py -f ./suits/dc/integration/integration-slice.yaml | tee ../../RESULTS-SLICE/dsTester_Summary_slice.txt" - } catch (Exception e) { - currentBuild.result = 'FAILURE' - echo "dsTester FrameWork FAILED" - } - } - sh 'python3 ../ci-scripts/routeCheck.py --mode Delete --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-slice/integration.tplt' - } - } - try { - sh "python3 ./ci-scripts/toCheckDSTesterResult.py --type slice" - } catch (Exception e) { - currentBuild.result = 'FAILURE' - echo "dsTester Slicing ResultCheck FAILED" - } - } - } - } - } - } - post { - always { - script { - // Generating the HTML report - if (mini == true){ - sh 'python3 ./ci-scripts/dsTestGenerateHTMLReport.py --job_name ' + JOB_NAME + ' --job_id ' + BUILD_ID + ' --job_url ' + BUILD_URL + ' --type mini' - } - if (slice == true){ - sh 'python3 ./ci-scripts/dsTestGenerateHTMLReport.py --job_name ' + JOB_NAME + ' --job_id ' + BUILD_ID + ' --job_url ' + BUILD_URL + ' --type slice' - } - if (basic == true){ - sh 'python3 ./ci-scripts/dsTestGenerateHTMLReport.py --job_name ' + JOB_NAME + ' --job_id ' + BUILD_ID + ' --job_url ' + BUILD_URL + ' --type basic' - } - - // Zipping all archived log files - sh "zip -r -qq cn5g_fed_docker_logs.zip archives RESULTS*" - if (fileExists('cn5g_fed_docker_logs.zip')) { - archiveArtifacts artifacts: 'cn5g_fed_docker_logs.zip' - } - listOfFiles = sh returnStdout: true, script: 'ls test_results*.html' - String[] htmlFiles = listOfFiles.split("\\n") - for (htmlFile in htmlFiles) { - archiveArtifacts artifacts: htmlFile - } - } - } - } -} diff --git a/ci-scripts/Jenkinsfile-GitLab-Docker-Basic b/ci-scripts/Jenkinsfile-GitLab-Docker-Basic deleted file mode 100644 index 434df7937e5b157878bbcbf5fe647302bec523a5..0000000000000000000000000000000000000000 --- a/ci-scripts/Jenkinsfile-GitLab-Docker-Basic +++ /dev/null @@ -1,314 +0,0 @@ -#!/bin/groovy -/* - * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The OpenAirInterface Software Alliance licenses this file to You under - * the OAI Public License, Version 1.1 (the "License"); you may not use this file - * except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.openairinterface.org/?page_id=698 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - *------------------------------------------------------------------------------- - * For more information about the OpenAirInterface (OAI) Software Alliance: - * contact@openairinterface.org - */ - -//------------------------------------------------------------------------------- -// Abstraction function to send social media messages: -// like on Slack or Mattermost -def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) { - if (params.pipelineUsesSlack != null) { - if (params.pipelineUsesSlack) { - slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage - } - } -} - -// Location of the CN executor node -def cn_ci_host = params.Host_CN_CI_Server - -// for lock -def cn_ci_resource = params.DockerContainers -def ds_tester_ci_resource = params.DsTester - -// When triggered by upstream, specify which tag to use -def upstreamTagToUse = params.upstreamTagToUse - -// Location of the CN tester -def dsT_host_flag = false -def dsT_host = "" -def dsT_host_user = "" - -// dsTester tag to use -def dsTesterTag = params.DSTESTER_TAG - -// Flags -def scmEvent = false -def upstreamEvent = false -def deployed = true - -// Default tags / branches --> could be passed on by upstream job or by PR content -def nrfTag = params.nrfTag -def nrfBranch = params.nrfBranch -def amfTag = params.amfTag -def amfBranch = params.amfBranch -def smfTag = params.smfTag -def smfBranch = params.smfBranch -def spgwuTag = params.spgwuTag -def spgwuBranch = params.spgwuBranch - -def ausfTag = params.ausfTag -def ausfBranch = params.ausfBranch -def udmTag = params.udmTag -def udmBranch = params.udmBranch -def udrTag = params.udrTag -def udrBranch = params.udrBranch - -//------------------------------------------------------------------------------- -// Pipeline start -pipeline { - agent { - label cn_ci_host - } - options { - disableConcurrentBuilds() - timestamps() - ansiColor('xterm') - lock(cn_ci_resource) - } - stages { - stage ('Verify Parameters') { - steps { - script { - echo '\u2705 \u001B[32mVerify Parameters\u001B[0m' - - JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"' - JOB_TIMESTAMP = JOB_TIMESTAMP.trim() - - echo "Node is ${NODE_NAME}" - - if (params.DS_Tester_Server_Flag != null) { - dsT_host_flag = params.DS_Tester_Server_Flag - if (dsT_host_flag) { - def allParametersPresent = true - if (params.DS_Tester_Server_Name == null) { - allParametersPresent = false - } else { - dsT_host = params.DS_Tester_Server_Name - } - if (params.DS_Tester_Server_Login == null) { - allParametersPresent = false - } else { - dsT_host_user = params.DS_Tester_Server_Login - } - if (allParametersPresent) { - echo "DS Tester is on ${dsT_host}" - } else { - echo "Some DS Tester parameters are missing!" 
- sh "./ci-scripts/fail.sh" - } - } - } - - // Find out the cause of the trigger - for (cause in currentBuild.getBuildCauses()) { - if (cause.toString() ==~ /.*UpstreamCause.*/) { - upstreamEvent = true - //} else { - // scmEvent = true - } - } - - if (upstreamEvent) { - if (params.NRF_TAG != null) { - nrfTag = params.NRF_TAG - echo "Upstream Job passed NRF_TAG to use: ${nrfTag}" - } - if (params.NRF_BRANCH != null) { - nrfBranch = params.NRF_BRANCH - echo "Upstream Job passed NRF_BRANCH to use: ${nrfBranch}" - } - if (params.AMF_TAG != null) { - amfTag = params.AMF_TAG - echo "Upstream Job passed AMF_TAG to use: ${amfTag}" - } - if (params.AMF_BRANCH != null) { - amfBranch = params.AMF_BRANCH - echo "Upstream Job passed AMF_BRANCH to use: ${amfBranch}" - } - if (params.SMF_TAG != null) { - smfTag = params.SMF_TAG - echo "Upstream Job passed SMF_TAG to use: ${smfTag}" - } - if (params.SMF_BRANCH != null) { - smfBranch = params.SMF_BRANCH - echo "Upstream Job passed SMF_BRANCH to use: ${smfBranch}" - } - if (params.SPGWU_TAG != null) { - spgwuTag = params.SPGWU_TAG - echo "Upstream Job passed SPGWU_TAG to use: ${spgwuTag}" - } - if (params.SPGWU_BRANCH != null) { - spgwuBranch = params.SPGWU_BRANCH - echo "Upstream Job passed SPGWU_BRANCH to use: ${spgwuBranch}" - } - if (params.AUSF_TAG != null) { - ausfTag = params.AUSF_TAG - echo "Upstream Job passed AUSF_TAG to use: ${ausfTag}" - } - if (params.AUSF_BRANCH != null) { - ausfBranch = params.AUSF_BRANCH - echo "Upstream Job passed AUSF_TAG to use: ${ausfBranch}" - } - if (params.UDM_TAG != null) { - udmTag = params.UDM_TAG - echo "Upstream Job passed UDM_TAG to use: ${udmTag}" - } - if (params.UDM_BRANCH != null) { - udmBranch = params.UDM_BRANCH - echo "Upstream Job passed UDM_TAG to use: ${udmBranch}" - } - if (params.UDR_TAG != null) { - udrTag = params.UDR_TAG - echo "Upstream Job passed UDR_TAG to use: ${udrTag}" - } - if (params.UDR_BRANCH != null) { - udrBranch = params.UDR_BRANCH - echo "Upstream Job passed UDR_TAG to use: ${udrBranch}" - } - sh "git clean -x -d -f > /dev/null 2>&1" - sh "git fetch --prune > /dev/null 2>&1" - sh 'git checkout -f ' + upstreamTagToUse - sh "zip -r -qq oai-cn5g-fed.zip .git" - sh "mkdir -p archives DS-TEST-RESULTS" - sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch + ' --ausf-branch ' + ausfBranch + ' --udm-branch ' + udmBranch + ' --udr-branch ' + udrBranch - } - if (scmEvent) { - sh "git clean -x -d -f > /dev/null 2>&1" - if ("MERGE".equals(env.gitlabActionType)) { - sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}" - } - sh "zip -r -qq oai-cn5g-fed.zip .git" - sh "mkdir -p archives DS-TEST-RESULTS" - sh './scripts/syncComponents.sh --nrf-branch develop --amf-branch develop --smf-branch develop --spgwu-tiny-branch develop --ausf-branch develop --udm-branch develop --udr-branch develop' - } - if ((!upstreamEvent) && (!scmEvent)) { - sh "git clean -x -d -f > /dev/null 2>&1" - sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch + ' --ausf-branch ' + ausfBranch + ' --udm-branch ' + udmBranch + ' --udr-branch ' + udrBranch - sh "mkdir -p archives DS-TEST-RESULTS" - } - // Verify that the images are available - try { - sh 'echo "OAI_NRF_TAG: 
oai-nrf:' + nrfTag +'" > archives/oai_nrf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log' - } catch (Exception e) { - error "OAI NRF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_AMF_TAG: oai-amf:' + amfTag +'" > archives/oai_amf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log' - } catch (Exception e) { - error "OAI AMF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_SMF_TAG: oai-smf:' + smfTag +'" > archives/oai_smf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log' - } catch (Exception e) { - error "OAI SMF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_SPGWU_TAG: oai-spgwu-tiny:' + spgwuTag +'" > archives/oai_spgwu_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log' - } catch (Exception e) { - error "OAI SPGW-U-Tiny Image tag to test does not exist!" - } - try { - sh 'echo "OAI_AUSF_TAG: oai-ausf:' + ausfTag +'" > archives/oai_ausf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-ausf:' + ausfTag + ' >> archives/oai_ausf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-ausf:' + ausfTag + ' >> archives/oai_ausf_image_info.log' - } catch (Exception e) { - error "OAI AUSF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_UDM_TAG: oai-udm:' + udmTag +'" > archives/oai_udm_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-udm:' + udmTag + ' >> archives/oai_udm_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-udm:' + udmTag + ' >> archives/oai_udm_image_info.log' - } catch (Exception e) { - error "OAI UDM Image tag to test does not exist!" - } - try { - sh 'echo "OAI_UDR_TAG: oai-udr:' + udrTag +'" > archives/oai_udr_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-udr:' + udrTag + ' >> archives/oai_udr_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-udr:' + udrTag + ' >> archives/oai_udr_image_info.log' - } catch (Exception e) { - error "OAI UDR Image tag to test does not exist!" 
- } - } - } - } - stage ('Deploy Whole 5G Core Network and Test with DS Tester') { - when { expression {dsT_host_flag} } - steps { - lock (ds_tester_ci_resource) { - script { - echo '\u2705 \u001B[32mDeploy CN5G using Docker-Compose and Testing with DS Tester\u001B[0m' - if (fileExists("dstester")) { - sh "rm -Rf dstester > /dev/null 2>&1" - } - sh "mkdir -p dstester" - dir ('dstester') { - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.dsTesterGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_token'] - ]) { - sh "git clone https://${git_username}:${git_token}@github.com/OPENAIRINTERFACE/chasseur.git . > ../git_clone.log 2>&1" - sh "git checkout -f " + dsTesterTag + " >> ../git_clone.log 2>&1" - // First put all correct tags to test - sh 'sed -i -e "s#NRF_TAG#' + nrfTag + '#" -i -e "s#AMF_TAG#' + amfTag + '#" -i -e "s#SMF_TAG#' + smfTag + '#" -i -e "s#UPF_TAG#' + spgwuTag + '#" -i -e "s#AUSF_TAG#' + ausfTag + '#" -i -e "s#UDM_TAG#' + udmTag + '#" -i -e "s#UDR_TAG#' + udrTag + '#" ./jenkins/suits/dc/integration/integration-basic.yaml' - // Check the route b/w the container and DSTester n/w and add - sh 'python3 ../ci-scripts/routeCheck.py --mode Add --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-basic/integration.tplt' - dir ('jenkins') { - try { - sh "python3 ./chasseurdocker.py -f ./suits/dc/integration/integration-basic.yaml | tee ../../DS-TEST-RESULTS/dsTester_Summary.txt" - } catch (Exception e) { - currentBuild.result = 'FAILURE' - echo "dsTester FrameWork FAILED" - } - } - sh 'python3 ../ci-scripts/routeCheck.py --mode Delete --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-basic/integration.tplt' - } - } - sh "python3 ./ci-scripts/toCheckDSTesterResult.py" - } - } - } - } - } - post { - always { - script { - // Generating the HTML report - sh 'python3 ./ci-scripts/dsTestGenerateHTMLReport.py --job_name ' + JOB_NAME + ' --job_id ' + BUILD_ID + ' --job_url ' + BUILD_URL - - // Zipping all archived log files - sh "zip -r -qq cn5g_fed_docker_logs.zip archives DS-TEST-RESULTS" - if (fileExists('cn5g_fed_docker_logs.zip')) { - archiveArtifacts artifacts: 'cn5g_fed_docker_logs.zip' - } - if (fileExists('test_results_oai_cn5g_basic.html')) { - archiveArtifacts artifacts: 'test_results_oai_cn5g_basic.html' - } - } - } - } -} diff --git a/ci-scripts/Jenkinsfile-GitLab-Docker-Check b/ci-scripts/Jenkinsfile-GitLab-Docker-Check deleted file mode 100644 index 3c2a2e6a5aa7b0c858725b1e0bc288124ebea90e..0000000000000000000000000000000000000000 --- a/ci-scripts/Jenkinsfile-GitLab-Docker-Check +++ /dev/null @@ -1,259 +0,0 @@ -#!/bin/groovy -/* - * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The OpenAirInterface Software Alliance licenses this file to You under - * the OAI Public License, Version 1.1 (the "License"); you may not use this file - * except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.openairinterface.org/?page_id=698 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - *------------------------------------------------------------------------------- - * For more information about the OpenAirInterface (OAI) Software Alliance: - * contact@openairinterface.org - */ - -//------------------------------------------------------------------------------- -// Abstraction function to send social media messages: -// like on Slack or Mattermost -def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) { - if (params.pipelineUsesSlack != null) { - if (params.pipelineUsesSlack) { - slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage - } - } -} - -// Location of the CN executor node -def cn_ci_host = params.Host_CN_CI_Server - -// for lock -def cn_ci_resource = params.DockerContainers - -//------------------------------------------------------------------------------- -// Pipeline start -pipeline { - agent { - label cn_ci_host - } - options { - disableConcurrentBuilds() - timestamps() - ansiColor('xterm') - lock(cn_ci_resource) - } - stages { - stage ('Verify Parameters') { - steps { - script { - echo '\u2705 \u001B[32mVerify Parameters\u001B[0m' - - JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"' - JOB_TIMESTAMP = JOB_TIMESTAMP.trim() - - sh "git clean -x -d -f > /dev/null 2>&1" - sh 'git log -n1' - if (params.CheckAnotherBranch != null) { - if (params.CheckAnotherBranch) { - if (params.BranchToCheck != null) { - sh 'git checkout '+ params.BranchToCheck - sh 'git log -n1' - } - } - } - - sh "mkdir -p archives/cn5g" - sh "mkdir -p archives/cn5gwithnoNRF" - sh "mkdir -p archives/gnbsim" - } - } - } - stage ('Deploy Whole 5G Core Network with NRF') { - steps { - script { - echo '\u2705 \u001B[32mDeploy CN5G using Docker-Compose with NRF\u001B[0m' - dir('docker-compose') { - sh 'sed -i -e "s@latest@develop@g" docker-compose.yaml' - sh 'docker-compose -f docker-compose.yaml up -d > ../archives/cn5g/compose_5gcn_up.log 2>&1' - sh 'sleep 100' - // Do a check on number of healthy containers - // 5 == mysql + nrf + amf + smf + upf(spgwu-tiny) - ret = sh returnStdout: true, script: 'docker-compose ps -a | grep -v unhealthy | grep -c healthy || true' - ret = ret.trim() - if (ret != '5') { - error "Deployment went wrong!" 
- } - } - } - } - post { - always { - script { - sh 'docker logs oai-nrf > archives/cn5g/oai_nrf.log 2>&1 || true' - sh 'docker logs oai-amf > archives/cn5g/oai_amf.log 2>&1 || true' - sh 'docker logs oai-smf > archives/cn5g/oai_smf.log 2>&1 || true' - sh 'docker logs oai-spgwu > archives/cn5g/oai_spgwu.log 2>&1 || true' - } - } - success { - script { - sh 'echo "DEPLOYMENT: OK"' - } - } - unsuccessful { - script { - sh 'echo "DEPLOYMENT: KO"' - } - } - } - } - stage ('Undeploy 5G-CN with NRF') { - steps { - script { - echo '\u2705 \u001B[32mUn-Deploy CN5G with NRF\u001B[0m' - dir('docker-compose') { - sh 'docker-compose down > ../archives/cn5g/compose_normal_down.log 2>&1' - } - } - } - } - - stage ('Deploy Whole 5G Core Network without NRF') { - steps { - script { - echo '\u2705 \u001B[32mDeploy CN5G using Docker-Compose without NRF\u001B[0m' - dir('docker-compose') { - sh 'sed -i -e "s@latest@develop@g" docker-compose-no-nrf.yaml' - sh 'docker-compose -f docker-compose-no-nrf.yaml up -d > ../archives/cn5gwithnoNRF/compose_5gcn_up.log 2>&1' - sh 'sleep 100' - // Do a check on number of healthy containers - // 4 == mysql + amf + smf + upf(spgwu-tiny) - ret = sh returnStdout: true, script: 'docker-compose -f docker-compose-no-nrf.yaml ps -a | grep -v unhealthy | grep -c healthy || true' - ret = ret.trim() - if (ret != '4') { - error "Deployment went wrong without NRF!" - } - } - } - } - post { - always { - script { - sh 'docker logs oai-amf > archives/cn5gwithnoNRF/oai_amf.log 2>&1 || true' - sh 'docker logs oai-smf > archives/cn5gwithnoNRF/oai_smf.log 2>&1 || true' - sh 'docker logs oai-spgwu > archives/cn5gwithnoNRF/oai_spgwu.log 2>&1 || true' - } - } - success { - script { - sh 'echo "DEPLOYMENT: OK without NRF"' - } - } - unsuccessful { - script { - dir('docker-compose') { - sh 'docker-compose -f docker-compose-no-nrf.yaml down || true' - } - sh 'echo "DEPLOYMENT: KO without NRF"' - } - } - } - } - stage ('Undeploy 5G-CN without NRF') { - steps { - script { - echo '\u2705 \u001B[32mUn-Deploy CN5G without NRF\u001B[0m' - dir('docker-compose') { - sh 'docker-compose -f docker-compose-no-nrf.yaml down > ../archives/cn5gwithnoNRF/compose_normal_down.log 2>&1' - } - } - } - } - - stage ('gnbsim tutorial') { - steps { - script { - echo '\u2705 \u001B[32mDeploy CN5G using Docker-Compose with NRF\u001B[0m' - dir('docker-compose') { - sh 'docker-compose -f docker-compose.yaml up -d > ../archives/gnbsim/compose_5gcn_up.log 2>&1' - sh 'sleep 100' - // Do a check on number of healthy containers - // 5 == mysql + nrf + amf + smf + upf(spgwu-tiny) - ret = sh returnStdout: true, script: 'docker-compose ps -a | grep -v unhealthy | grep -c healthy || true' - ret = ret.trim() - if (ret != '5') { - error "Deployment went wrong!" 
- } - sh 'docker-compose -f docker-compose-gnbsim.yaml up -d gnbsim > ../archives/gnbsim/gnbsim_up.log 2>&1' - sh 'sleep 20' - // Do a check if gnbsim is healthy - ret = sh returnStdout: true, script: 'docker-compose -f docker-compose-gnbsim.yaml ps -a | grep -v unhealthy | grep -c healthy || true' - ret = ret.trim() - if (ret != '1') { - error "gnbsim deployment went wrong" - } - } - } - } - post { - always { - script { - sh 'docker logs oai-nrf > archives/gnbsim/oai_nrf.log 2>&1 || true' - sh 'docker logs oai-amf > archives/gnbsim/oai_amf.log 2>&1 || true' - sh 'docker logs oai-smf > archives/gnbsim/oai_smf.log 2>&1 || true' - sh 'docker logs oai-spgwu > archives/gnbsim/oai_spgwu.log 2>&1 || true' - sh 'docker logs gnbsim > archives/gnbsim/gnbsim.log 2>&1 || true' - } - } - success { - script { - sh 'echo "DEPLOYMENT: OK"' - } - } - unsuccessful { - script { - dir('docker-compose') { - sh 'docker-compose -f docker-compose-gnbsim.yaml down || true' - sh 'docker-compose down || true' - } - sh 'echo "DEPLOYMENT: KO"' - } - } - } - } - stage ('Un-deploy gnbsim tutorial') { - steps { - script { - dir('docker-compose') { - sh 'docker-compose -f docker-compose-gnbsim.yaml down > ../archives/gnbsim/gnbsim_down.log 2>&1' - sh 'docker-compose down >> ../archives/gnbsim/cn5g_down.log 2>&1' - } - } - } - } - } - - post { - always { - script { - // Remove any leftover containers/networks - dir('docker-compose') { - sh 'docker-compose -f docker-compose-gnbsim.yaml down || true' - sh 'docker-compose down || true' - } - // Zipping all archived log files - sh "zip -r -qq cn5g_deploy_docker_logs.zip archives" - if (fileExists('cn5g_deploy_docker_logs.zip')) { - archiveArtifacts artifacts: 'cn5g_deploy_docker_logs.zip' - } - } - } - } -} diff --git a/ci-scripts/Jenkinsfile-GitLab-Docker-Mini b/ci-scripts/Jenkinsfile-GitLab-Docker-Mini deleted file mode 100644 index fa097d7261df57b46bfdb10c743aa0a5c531d62e..0000000000000000000000000000000000000000 --- a/ci-scripts/Jenkinsfile-GitLab-Docker-Mini +++ /dev/null @@ -1,262 +0,0 @@ -#!/bin/groovy -/* - * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The OpenAirInterface Software Alliance licenses this file to You under - * the OAI Public License, Version 1.1 (the "License"); you may not use this file - * except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.openairinterface.org/?page_id=698 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- *------------------------------------------------------------------------------- - * For more information about the OpenAirInterface (OAI) Software Alliance: - * contact@openairinterface.org - */ - -//------------------------------------------------------------------------------- -// Abstraction function to send social media messages: -// like on Slack or Mattermost -def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) { - if (params.pipelineUsesSlack != null) { - if (params.pipelineUsesSlack) { - slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage - } - } -} - -// Location of the CN executor node -def cn_ci_host = params.Host_CN_CI_Server - -// for lock -def cn_ci_resource = params.DockerContainers -def ds_tester_ci_resource = params.DsTester - -// When triggered by upstream, specify which tag to use -def upstreamTagToUse = params.upstreamTagToUse - -// Location of the CN tester -def dsT_host_flag = false -def dsT_host = "" -def dsT_host_user = "" - -// dsTester tag to use -def dsTesterTag = params.DSTESTER_TAG - -// Flags -def scmEvent = false -def upstreamEvent = false -def deployed = true - -// Default tags / branches --> could be passed on by upstream job or by PR content -def nrfTag = params.nrfTag -def nrfBranch = params.nrfBranch -def amfTag = params.amfTag -def amfBranch = params.amfBranch -def smfTag = params.smfTag -def smfBranch = params.smfBranch -def spgwuTag = params.spgwuTag -def spgwuBranch = params.spgwuBranch - -//------------------------------------------------------------------------------- -// Pipeline start -pipeline { - agent { - label cn_ci_host - } - options { - disableConcurrentBuilds() - timestamps() - ansiColor('xterm') - lock(cn_ci_resource) - } - stages { - stage ('Verify Parameters') { - steps { - script { - echo '\u2705 \u001B[32mVerify Parameters\u001B[0m' - - JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"' - JOB_TIMESTAMP = JOB_TIMESTAMP.trim() - - echo "Node is ${NODE_NAME}" - - if (params.DS_Tester_Server_Flag != null) { - dsT_host_flag = params.DS_Tester_Server_Flag - if (dsT_host_flag) { - def allParametersPresent = true - if (params.DS_Tester_Server_Name == null) { - allParametersPresent = false - } else { - dsT_host = params.DS_Tester_Server_Name - } - if (params.DS_Tester_Server_Login == null) { - allParametersPresent = false - } else { - dsT_host_user = params.DS_Tester_Server_Login - } - if (allParametersPresent) { - echo "DS Tester is on ${dsT_host}" - } else { - echo "Some DS Tester parameters are missing!" 
- sh "./ci-scripts/fail.sh" - } - } - } - - // Find out the cause of the trigger - for (cause in currentBuild.getBuildCauses()) { - if (cause.toString() ==~ /.*UpstreamCause.*/) { - upstreamEvent = true - //} else { - // scmEvent = true - } - } - - if (upstreamEvent) { - if (params.NRF_TAG != null) { - nrfTag = params.NRF_TAG - echo "Upstream Job passed NRF_TAG to use: ${nrfTag}" - } - if (params.NRF_BRANCH != null) { - nrfBranch = params.NRF_BRANCH - echo "Upstream Job passed NRF_BRANCH to use: ${nrfBranch}" - } - if (params.AMF_TAG != null) { - amfTag = params.AMF_TAG - echo "Upstream Job passed AMF_TAG to use: ${amfTag}" - } - if (params.AMF_BRANCH != null) { - amfBranch = params.AMF_BRANCH - echo "Upstream Job passed AMF_BRANCH to use: ${amfBranch}" - } - if (params.SMF_TAG != null) { - smfTag = params.SMF_TAG - echo "Upstream Job passed SMF_TAG to use: ${smfTag}" - } - if (params.SMF_BRANCH != null) { - smfBranch = params.SMF_BRANCH - echo "Upstream Job passed SMF_BRANCH to use: ${smfBranch}" - } - if (params.SPGWU_TAG != null) { - spgwuTag = params.SPGWU_TAG - echo "Upstream Job passed SPGWU_TAG to use: ${spgwuTag}" - } - if (params.SPGWU_BRANCH != null) { - spgwuBranch = params.SPGWU_BRANCH - echo "Upstream Job passed SPGWU_BRANCH to use: ${spgwuBranch}" - } - sh "git clean -x -d -f > /dev/null 2>&1" - sh "git fetch --prune > /dev/null 2>&1" - sh 'git checkout -f ' + upstreamTagToUse - sh "zip -r -qq oai-cn5g-fed.zip .git" - sh "mkdir -p archives DS-TEST-RESULTS" - sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch - } - if (scmEvent) { - sh "git clean -x -d -f > /dev/null 2>&1" - if ("MERGE".equals(env.gitlabActionType)) { - sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}" - } - sh "zip -r -qq oai-cn5g-fed.zip .git" - sh "mkdir -p archives DS-TEST-RESULTS" - sh './scripts/syncComponents.sh --nrf-branch develop --amf-branch develop --smf-branch develop --spgwu-tiny-branch develop' - } - if ((!upstreamEvent) && (!scmEvent)) { - sh "git clean -x -d -f > /dev/null 2>&1" - sh './scripts/syncComponents.sh --nrf-branch ' + nrfBranch + ' --amf-branch ' + amfBranch + ' --smf-branch ' + smfBranch + ' --spgwu-tiny-branch ' + spgwuBranch - sh "mkdir -p archives DS-TEST-RESULTS" - } - // Verify that the images are available - try { - sh 'echo "OAI_NRF_TAG: oai-nrf:' + nrfTag +'" > archives/oai_nrf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-nrf:' + nrfTag + ' >> archives/oai_nrf_image_info.log' - } catch (Exception e) { - error "OAI NRF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_AMF_TAG: oai-amf:' + amfTag +'" > archives/oai_amf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-amf:' + amfTag + ' >> archives/oai_amf_image_info.log' - } catch (Exception e) { - error "OAI AMF Image tag to test does not exist!" 
- } - try { - sh 'echo "OAI_SMF_TAG: oai-smf:' + smfTag +'" > archives/oai_smf_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-smf:' + smfTag + ' >> archives/oai_smf_image_info.log' - } catch (Exception e) { - error "OAI SMF Image tag to test does not exist!" - } - try { - sh 'echo "OAI_SPGWU_TAG: oai-spgwu-tiny:' + spgwuTag +'" > archives/oai_spgwu_image_info.log' - sh 'docker image inspect --format=\'Size = {{.Size}} bytes\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log' - sh 'docker image inspect --format=\'Date = {{.Created}}\' oai-spgwu-tiny:' + spgwuTag + ' >> archives/oai_spgwu_image_info.log' - } catch (Exception e) { - error "OAI SPGW-U-Tiny Image tag to test does not exist!" - } - } - } - } - stage ('Deploy 5G Core Network and Test with DS Tester') { - when { expression {dsT_host_flag} } - steps { - lock (ds_tester_ci_resource) { - script { - echo '\u2705 \u001B[32mDeploy CN5G using Docker-Compose and Testing with DS Tester\u001B[0m' - if (fileExists("dstester")) { - sh "rm -Rf dstester > /dev/null 2>&1" - } - sh "mkdir -p dstester" - dir ('dstester') { - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.dsTesterGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_token'] - ]) { - sh "git clone https://${git_username}:${git_token}@github.com/OPENAIRINTERFACE/chasseur.git . > ../git_clone.log 2>&1" - sh "git checkout -f " + dsTesterTag + " >> ../git_clone.log 2>&1" - // First put all correct tags to test - sh 'sed -i -e "s#NRF_TAG#' + nrfTag + '#" -i -e "s#AMF_TAG#' + amfTag + '#" -i -e "s#SMF_TAG#' + smfTag + '#" -i -e "s#SPGWU_TAG#' + spgwuTag + '#" ./jenkins/suits/dc/integration/integration-mini.yaml' - // Check the route b/w the container and DSTester n/w and add - sh 'python3 ../ci-scripts/routeCheck.py --mode Add --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-mini/integration.tplt' - dir ('jenkins') { - try { - sh "python3 ./chasseurdocker.py -f ./suits/dc/integration/integration-mini.yaml | tee ../../DS-TEST-RESULTS/dsTester_Summary.txt" - } catch (Exception e) { - currentBuild.result = 'FAILURE' - echo "dsTester FrameWork FAILED" - } - } - sh 'python3 ../ci-scripts/routeCheck.py --mode Delete --userName ' + dsT_host_user + ' --hostName ' + dsT_host + ' --docker_compose docker-compose/integration/integration-mini/integration.tplt' - } - } - sh "python3 ./ci-scripts/toCheckDSTesterResult.py" - } - } - } - } - } - post { - always { - script { - // Generating the HTML report - sh 'python3 ./ci-scripts/dsTestGenerateHTMLReport2.py --job_name ' + JOB_NAME + ' --job_id ' + BUILD_ID + ' --job_url ' + BUILD_URL - - // Zipping all archived log files - sh "zip -r -qq cn5g_fed_docker_logs.zip archives DS-TEST-RESULTS" - if (fileExists('cn5g_fed_docker_logs.zip')) { - archiveArtifacts artifacts: 'cn5g_fed_docker_logs.zip' - } - if (fileExists('test_results_oai_cn5g_mini.html')) { - archiveArtifacts artifacts: 'test_results_oai_cn5g_mini.html' - } - } - } - } -} diff --git a/ci-scripts/Jenkinsfile-GitLab-Helm b/ci-scripts/Jenkinsfile-GitLab-Helm deleted file mode 100644 index 93dc5cbedfc7e05f329526440d277807710ee584..0000000000000000000000000000000000000000 --- a/ci-scripts/Jenkinsfile-GitLab-Helm +++ /dev/null @@ -1,272 +0,0 @@ -#!/bin/groovy -/* - * Licensed to the 
OpenAirInterface (OAI) Software Alliance under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The OpenAirInterface Software Alliance licenses this file to You under - * the OAI Public License, Version 1.1 (the "License"); you may not use this file - * except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.openairinterface.org/?page_id=698 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - *------------------------------------------------------------------------------- - * For more information about the OpenAirInterface (OAI) Software Alliance: - * contact@openairinterface.org - */ - -//------------------------------------------------------------------------------- -// Abstraction function to send social media messages: -// like on Slack or Mattermost -def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) { - if (params.pipelineUsesSlack != null) { - if (params.pipelineUsesSlack) { - slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage - } - } -} - -// Location of the CN executor node -def cn_ci_host = params.Host_CN_CI_Server - -// for lock -def cn_ci_resource = params.CN_CI_Resource -def ds_tester_ci_resource = params.DsTester - -// When triggered by upstream, specify which tag to use -def upstreamTagToUse = params.upstreamTagToUse - -// Location of the CN tester -def dsT_host_flag = false -def dsT_host = "" -def dsT_host_user = "" -def dsT_host_ip_addr = "" - -// dsTester tag to use -def dsTesterTag = params.DSTESTER_TAG - -// Flags -def scmEvent = false -def upstreamEvent = false - -// Default tags --> could be passed on by upstream job or by PR content -def nrfTag = params.nrfTag -def amfTag = params.amfTag -def smfTag = params.smfTag -def spgwuTag = params.spgwuTag -def udrTag = params.udrTag -def udmTag = params.udmTag -def ausfTag = params.ausfTag - -//------------------------------------------------------------------------------- -// Pipeline start -pipeline { - agent { - label cn_ci_host - } - options { - disableConcurrentBuilds() - timestamps() - ansiColor('xterm') - lock(cn_ci_resource) - } - stages { - stage ('Verify Parameters') { - steps { - script { - echo '\u2705 \u001B[32mVerify Parameters\u001B[0m' - - JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"' - JOB_TIMESTAMP = JOB_TIMESTAMP.trim() - - def allParametersPresent = true - if (params.OC_Credentials == null) { - allParametersPresent = false - } - if (params.OC_ProjectName == null) { - allParametersPresent = false - } - if (allParametersPresent) { - echo "Cluster Access parameters are present" - } else { - echo "Some Cluster Access parameters are missing" - sh "./ci-scripts/fail.sh" - } - if (params.DS_Tester_Server_Flag != null) { - dsT_host_flag = params.DS_Tester_Server_Flag - if (dsT_host_flag) { - allParametersPresent = true - if (params.DS_Tester_Server_Name == null) { - allParametersPresent = false - } else { - dsT_host = params.DS_Tester_Server_Name - } - if (params.DS_Tester_Server_Login == null) { - allParametersPresent = false - } else { - dsT_host_user = params.DS_Tester_Server_Login - } - if 
(params.DS_Tester_Server_IP_Addr == null) { - allParametersPresent = false - } else { - dsT_host_ip_addr = params.DS_Tester_Server_IP_Addr - } - if (params.dsTesterGitLabRepository_Credentials == null) { - allParametersPresent = false - } - if (allParametersPresent) { - echo "DS Tester is on ${dsT_host}" - } else { - echo "Some DS Tester parameters are missing!" - sh "./ci-scripts/fail.sh" - } - } - } - - // Clean workspace and prepare artifacts location - sh "git clean -x -d -f > /dev/null 2>&1" - sh "mkdir -p archives DS-TEST-RESULTS" - - // Find out the cause of the trigger - for (cause in currentBuild.getBuildCauses()) { - if (cause.toString() ==~ /.*UpstreamCause.*/) { - upstreamEvent = true - } - } - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password'] - ]) { - if (upstreamEvent) { - if (params.NRF_TAG != null) { - nrfTag = params.NRF_TAG - echo "Upstream Job passed NRF_TAG to use: ${nrfTag}" - } - if (params.AMF_TAG != null) { - amfTag = params.AMF_TAG - echo "Upstream Job passed AMF_TAG to use: ${amfTag}" - } - if (params.SMF_TAG != null) { - smfTag = params.SMF_TAG - echo "Upstream Job passed SMF_TAG to use: ${smfTag}" - } - if (params.SPGWU_TAG != null) { - spgwuTag = params.SPGWU_TAG - echo "Upstream Job passed SPGWU_TAG to use: ${spgwuTag}" - } - if (params.UDR_TAG != null) { - udrTag = params.UDR_TAG - echo "Upstream Job passed UDR_TAG to use: ${udrTag}" - } - if (params.UDM_TAG != null) { - udmTag = params.UDM_TAG - echo "Upstream Job passed UDM_TAG to use: ${udmTag}" - } - if (params.AUSF_TAG != null) { - ausfTag = params.AUSF_TAG - echo "Upstream Job passed AUSF_TAG to use: ${ausfTag}" - } - sh "git clean -x -d -f > /dev/null 2>&1" - sh "git fetch --prune > /dev/null 2>&1" - sh 'git checkout -f ' + upstreamTagToUse - sh 'mkdir -p archives DS-TEST-RESULTS' - } - imageTags = "mysql:8.0,oai-nrf:${nrfTag},oai-udr:${udrTag},oai-udm:${udmTag},oai-ausf:${ausfTag},oai-amf:${amfTag},oai-smf:${smfTag},oai-spgwu-tiny:${spgwuTag}" - } - } - } - } - stage ('Deploy Whole 5G Core Network') { - steps { - script { - echo '\u2705 \u001B[32mDeploy CN5G on Cluster\u001B[0m' - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password'] - ]) { - sh "python3 ci-scripts/helmDeploy.py --mode=Deploy --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName} --imageTags=${imageTags}" - } - } - } - } - stage ('Check with DS Tester') { - when { expression {dsT_host_flag} } - steps { - lock (ds_tester_ci_resource) { - script { - echo '\u2705 \u001B[32mTesting with DS Tester\u001B[0m' - if (fileExists("dstester")) { - sh "rm -Rf dstester > /dev/null 2>&1" - } - sh "mkdir -p dstester" - dir ('dstester') { - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.dsTesterGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_token'] - ]) { - sh "git clone https://${git_username}:${git_token}@github.com/OPENAIRINTERFACE/chasseur.git . 
> ../git_clone.log 2>&1" - sh "git checkout -f " + dsTesterTag + " >> ../git_clone.log 2>&1" - dir ('jenkins') { - try { - sh "python3 ./dogmatix-agent.py -f ./suits/hc/integration.yaml -d true | tee ../../DS-TEST-RESULTS/dsTester_Summary.txt" - } catch (Exception e) { - currentBuild.result = 'FAILURE' - echo "dsTester Running FAILED" - } - } - } - } - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password'] - ]) { - try { - sh "python3 ci-scripts/helmDeploy.py --mode=GetLogs --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName} --imageTags=${imageTags}" - } catch (Exception e) { - echo "Unable to collect the logs, configs, pcaps" - } - } - sh "python3 ./ci-scripts/toCheckDSTesterResult1.py" - } - } - } - } - stage ('Undeploy 5G-CN') { - steps { - script { - echo '\u2705 \u001B[32mUnDeploy CN5G on Cluster\u001B[0m' - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password'] - ]) { - sh "python3 ci-scripts/helmDeploy.py --mode=UnDeploy --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName} --imageTags=${imageTags}" - } - } - } - } - } - post { - always { - script { - withCredentials([ - [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password'] - ]) { - // Remove any leftover containers/networks - sh "python3 ci-scripts/helmDeploy.py --mode=UnDeploy --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName} --imageTags=${imageTags}" - } - // Generating the HTML report - sh 'python3 ./ci-scripts/dsTestGenerateHTMLReport1.py --job_name=' + JOB_NAME + ' --job_id=' + BUILD_ID + ' --job_url=' + BUILD_URL - - // Zipping all archived log files - sh "zip -r -qq cn5g_fed_cluster_logs.zip archives DS-TEST-RESULTS" - sh "rm -rf archives DS-TEST-RESULTS" - if (fileExists('cn5g_fed_cluster_logs.zip')) { - archiveArtifacts artifacts: 'cn5g_fed_cluster_logs.zip' - } - if (fileExists('test_results_oai_cn5g_oc.html')) { - archiveArtifacts artifacts: 'test_results_oai_cn5g_oc.html' - } - } - } - } -} diff --git a/ci-scripts/Jenkinsfile-GitLab-Load-Check b/ci-scripts/Jenkinsfile-GitLab-Load-Check index b3b9b1da105ffae759c66196de7b97f16727fa3a..ddbd5c16d7e12a85a8640a811cba320cf7b7f4e3 100644 --- a/ci-scripts/Jenkinsfile-GitLab-Load-Check +++ b/ci-scripts/Jenkinsfile-GitLab-Load-Check @@ -101,12 +101,15 @@ pipeline { upstreamJobs[UDR] = true } } - sh "git clean -x -d -f > /dev/null 2>&1" - sh "mkdir -p archives" + sh "git clean -x -d -ff > /dev/null 2>&1" + sh "git submodule foreach --recursive 'git clean -x -d -ff' > /dev/null 2>&1" + sh "git submodule deinit --force --all > /dev/null 2>&1" // For any upstream job (PR or post-merge), let run on certified tag. 
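The workspace-reset sequence added in these Jenkinsfile hunks matters mostly for its ordering: clean the superproject and every submodule, deinit all registered submodules, optionally check out the certified upstream tag, then re-initialise only the ci-scripts/common submodule before creating the archives folder. A minimal standalone sketch of that same ordering, assuming git is on PATH and using Python's subprocess (an illustration, not part of the pipeline itself):

import subprocess

def reset_workspace(upstream_tag=None):
    # Order mirrors the Jenkinsfile steps: clean superproject and submodules,
    # deinit everything, optionally pin to the certified tag, then fetch only
    # the ci-scripts/common submodule needed by the shared CI helpers.
    steps = [
        "git clean -x -d -ff",
        "git submodule foreach --recursive 'git clean -x -d -ff'",
        "git submodule deinit --force --all",
    ]
    if upstream_tag is not None:
        steps.append(f"git checkout -f {upstream_tag}")
    steps += [
        "git submodule update --init --recursive ci-scripts/common",
        "mkdir -p archives",
    ]
    for step in steps:
        subprocess.run(step, shell=True, check=True)
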
if (upstreamEvent) { sh 'git checkout -f ' + upstreamTagToUse } + sh 'git submodule update --init --recursive ci-scripts/common' + sh "mkdir -p archives" for (ii = 0; ii < imageNames.size(); ii++) { if (ii == NRF) { diff --git a/ci-scripts/Jenkinsfile-GitLab-NGAP-Tester b/ci-scripts/Jenkinsfile-GitLab-NGAP-Tester index 2e79c1ab5cafd2018995c944da944e1cb05a1190..b1e8af4383ef6cff5e7fe9361b422b51bdd2ee96 100644 --- a/ci-scripts/Jenkinsfile-GitLab-NGAP-Tester +++ b/ci-scripts/Jenkinsfile-GitLab-NGAP-Tester @@ -96,12 +96,15 @@ pipeline { upstreamJobs[UDR] = true } } - sh "git clean -x -d -f > /dev/null 2>&1" - sh "mkdir -p archives" + sh "git clean -x -d -ff > /dev/null 2>&1" + sh "git submodule foreach --recursive 'git clean -x -d -ff' > /dev/null 2>&1" + sh "git submodule deinit --force --all > /dev/null 2>&1" // For any upstream job (PR or post-merge), let run on certified tag. if (upstreamEvent) { sh 'git checkout -f ' + upstreamTagToUse } + sh 'git submodule update --init --recursive ci-scripts/common' + sh "mkdir -p archives" for (ii = 0; ii < imageNames.size(); ii++) { if (ii == NRF) { diff --git a/ci-scripts/Jenkinsfile-GitLab-Tutorial-Check b/ci-scripts/Jenkinsfile-GitLab-Tutorial-Check index 2ea04d823722a2e24a166bc3450e08ce8773a676..0ebe9a4bf1b43599a20d6d1b990c87e22b90e789 100644 --- a/ci-scripts/Jenkinsfile-GitLab-Tutorial-Check +++ b/ci-scripts/Jenkinsfile-GitLab-Tutorial-Check @@ -116,11 +116,14 @@ pipeline { } } sh "git clean -x -d -f > /dev/null 2>&1" - sh "mkdir -p archives" + sh "git submodule foreach --recursive 'git clean -x -d -ff' > /dev/null 2>&1" + sh "git submodule deinit --force --all > /dev/null 2>&1" // For any upstream job (PR or post-merge), let run on certified tag. if (upstreamEvent) { sh 'git checkout -f ' + upstreamTagToUse } + sh 'git submodule update --init --recursive ci-scripts/common' + sh "mkdir -p archives" for (ii = 0; ii < imageNames.size(); ii++) { if (ii == NRF) { @@ -296,7 +299,7 @@ pipeline { dir ('ci-scripts') { // Tee will make the command always pass // Please use the same log name as the folder used in the tutorial - sh './checkTutorial.py --tutorial DEPLOY_SA5G_WITH_GNBSIM.md | tee ../archives/mini-gnbsim.log' + sh './checkTutorial.py --tutorial DEPLOY_SA5G_MINI_WITH_GNBSIM.md | tee ../archives/mini-gnbsim.log' } // Checking if FAIL appears try { diff --git a/ci-scripts/charts/oai-5g-basic/values.yaml b/ci-scripts/charts/oai-5g-basic/values.yaml index 0361869a131a0fa480bc3f3ca5d3223b3adaaf4c..a889c1bce0a3388360754224e2343cf7376edbc3 100644 --- a/ci-scripts/charts/oai-5g-basic/values.yaml +++ b/ci-scripts/charts/oai-5g-basic/values.yaml @@ -14,6 +14,8 @@ oai-nrf: version: NRF_TAG ## The branch to be used to pull from dockerhub # pullPolicy: IfNotPresent or Never or Always pullPolicy: IfNotPresent + config: + logLevel: "debug" nodeSelector: {} oai-udr: enabled: true @@ -24,6 +26,8 @@ oai-udr: version: UDR_TAG ## The branch to be used to pull from dockerhub # pullPolicy: IfNotPresent or Never or Always pullPolicy: IfNotPresent + config: + logLevel: "debug" nodeSelector: {} oai-udm: enabled: true @@ -34,6 +38,8 @@ oai-udm: version: UDM_TAG ## The branch to be used to pull from dockerhub # pullPolicy: IfNotPresent or Never or Always pullPolicy: IfNotPresent + config: + logLevel: "debug" nodeSelector: {} oai-ausf: enabled: true @@ -44,6 +50,8 @@ oai-ausf: version: AUSF_TAG ## The branch to be used to pull from dockerhub # pullPolicy: IfNotPresent or Never or Always pullPolicy: IfNotPresent + config: + logLevel: "debug" nodeSelector: {} oai-amf: 
enabled: true @@ -78,6 +86,7 @@ oai-amf: useHttp2: "no" intAlgoList: '[ "NIA1" , "NIA2" ]' ciphAlgoList: '[ "NEA0" , "NEA1" , "NEA2" ]' + logLevel: "debug" nodeSelector: {} #Using dedicated nodeName in the chart directly oai-spgwu-tiny: enabled: true @@ -116,6 +125,7 @@ oai-spgwu-tiny: nssaiSst1: 1 # should match with SMF information (Optional, if removed slice will not be configured) nssaiSd1: "0xFFFFFF" # should match with SMF information (Optional, if removed sd value will be 0xFFFFFF only if nssaiSst1 is configured) dnn1: "ims" # should match with SMF information + logLevel: "debug" nodeSelector: {} oai-traffic-server: enabled: true @@ -170,4 +180,5 @@ oai-smf: qosProfile5qi1: 1 sessionAmbrUl1: "1000Mbps" sessionAmbrDl1: "1000Mbps" + logLevel: "debug" nodeSelector: {} diff --git a/ci-scripts/charts/oai-amf/values.yaml b/ci-scripts/charts/oai-amf/values.yaml index a2569292de155b976cb31a4c2aee2215ff66dc54..0c9ec3909791ccb4fc82a72208c7df1f35a7ba47 100644 --- a/ci-scripts/charts/oai-amf/values.yaml +++ b/ci-scripts/charts/oai-amf/values.yaml @@ -91,7 +91,7 @@ config: mySqlUser: "root" # OPTIONAL: used only if not using AUSF mySqlPass: "linux" # OPTIONAL: used only if not using AUSF mySqlDb: "oai_db" # OPTIONAL: used only if not using AUSF - logLevel: "info" + logLevel: "debug" persistent: sharedvolume: true diff --git a/ci-scripts/charts/oai-ausf/values.yaml b/ci-scripts/charts/oai-ausf/values.yaml index 680e6d3dbafe7e0c8114cf91829c9d443e9b3747..22ef450b6ff9f4c2aaeaf4d846ae25275a80657f 100644 --- a/ci-scripts/charts/oai-ausf/values.yaml +++ b/ci-scripts/charts/oai-ausf/values.yaml @@ -48,7 +48,7 @@ config: tz: "Europe/Paris" instanceId: "0" pidDirectory: "/var/run" - logLevel: "info" + logLevel: "debug" ausfName: "OAI_AUSF" sbiIfName: "eth0" sbiPortHttp1: "80" diff --git a/ci-scripts/charts/oai-nrf/values.yaml b/ci-scripts/charts/oai-nrf/values.yaml index ad06ad7348238dc00c73b82b2241f4ec725b0f29..164e576e76961ee2cf047ba5a671b420ffc3a21a 100644 --- a/ci-scripts/charts/oai-nrf/values.yaml +++ b/ci-scripts/charts/oai-nrf/values.yaml @@ -52,7 +52,7 @@ config: nrfInterfacePortForSBI: "80" nrfInterfaceHttp2PortForSBI: "8080" nrfApiVersion: "v1" - logLevel: "info" + logLevel: "debug" persistent: sharedvolume: true diff --git a/ci-scripts/charts/oai-smf/values.yaml b/ci-scripts/charts/oai-smf/values.yaml index c901c0325521aa96f9895a53acd79c93155a0d1a..57ae83496648b3aff17b083e23f56533379604cf 100644 --- a/ci-scripts/charts/oai-smf/values.yaml +++ b/ci-scripts/charts/oai-smf/values.yaml @@ -98,7 +98,7 @@ config: qosProfile5qi1: 1 sessionAmbrUl1: "1000Mbps" sessionAmbrDl1: "1000Mbps" - logLevel: "info" + logLevel: "debug" ## currently only used by tcpdump container to store the tcpdump, this volume will be shared between all the network functions (If true it should be true for all the NFs specially NRF) persistent: diff --git a/ci-scripts/charts/oai-spgwu-tiny/values.yaml b/ci-scripts/charts/oai-spgwu-tiny/values.yaml index 2ed1c617e7b870708c4180bfef80f63b7e9f6491..ffd393875329fb1e9355cb6fda939ca22da519e0 100644 --- a/ci-scripts/charts/oai-spgwu-tiny/values.yaml +++ b/ci-scripts/charts/oai-spgwu-tiny/values.yaml @@ -76,7 +76,7 @@ config: nssaiSst1: 1 # should match with SMF information (Optional, if removed slice will not be configured) nssaiSd1: "0xFFFFFF" # should match with SMF information (Optional, if removed sd value will be 0xFFFFFF only if nssaiSst1 is configured) dnn1: "ims" # should match with SMF information - logLevel: "info" + logLevel: "debug" ## currently only used by tcpdump container 
to store the tcpdump, this volume will be shared between all the network functions persistent: diff --git a/ci-scripts/charts/oai-udm/values.yaml b/ci-scripts/charts/oai-udm/values.yaml index e4c132491a3289981e0e1d40f7e73defdc5f053c..b839c702020fa35ca9c0e97e20a4c5a6f6843ee0 100644 --- a/ci-scripts/charts/oai-udm/values.yaml +++ b/ci-scripts/charts/oai-udm/values.yaml @@ -45,7 +45,7 @@ config: tz: "Europe/Paris" instance: 0 pidDirectory: "/var/run" - logLevel: "info" + logLevel: "debug" udmName: "oai-udm" sbiIfName: "eth0" sbiPortHttp1: "80" diff --git a/ci-scripts/charts/oai-udr/values.yaml b/ci-scripts/charts/oai-udr/values.yaml index 25069c72f2cb1be230ccc5db653d88efa49f9cb0..75789ff3ba802e8cd768c621be5376ba4d21e6c0 100644 --- a/ci-scripts/charts/oai-udr/values.yaml +++ b/ci-scripts/charts/oai-udr/values.yaml @@ -52,7 +52,7 @@ config: instance: "0" udrname: "oai-udr" pidDirectory: "/var/run" - logLevel: "info" + logLevel: "debug" sbiIfName: "eth0" sbiPortHttp1: "80" sbiPortHttp2: "8080" diff --git a/ci-scripts/checkCOTS-UE-Testing.py b/ci-scripts/checkCOTS-UE-Testing.py index b0ed388910c6917bcd1adf59569bc256813dde67..253ea01300616f5a6d178fc88cd5badacc72db46 100755 --- a/ci-scripts/checkCOTS-UE-Testing.py +++ b/ci-scripts/checkCOTS-UE-Testing.py @@ -26,9 +26,8 @@ import logging import os import re import sys -import subprocess -from generate_html import ( +from common.python.generate_html import ( generate_header, generate_footer, generate_chapter, diff --git a/ci-scripts/checkContainerStatus.py b/ci-scripts/checkContainerStatus.py index 17f4bbd1f1611f55b019966565f650116c1064dc..1bf220b4742d2d8c9e9928ea87b5c864cdeb79bb 100755 --- a/ci-scripts/checkContainerStatus.py +++ b/ci-scripts/checkContainerStatus.py @@ -26,7 +26,7 @@ import logging import re import sys import time -import cls_cmd +import common.python.cls_cmd as cls_cmd logging.basicConfig( level=logging.DEBUG, diff --git a/ci-scripts/checkLoadTestHtmlReport.py b/ci-scripts/checkLoadTestHtmlReport.py index 8316548235874573f49bd1200896e28e11f1eabd..52d00fcaaf45d0cb2d187a259ef4080003c524bb 100755 --- a/ci-scripts/checkLoadTestHtmlReport.py +++ b/ci-scripts/checkLoadTestHtmlReport.py @@ -26,9 +26,8 @@ import argparse import os import re import sys -import subprocess -from generate_html import ( +from common.python.generate_html import ( generate_header, generate_footer, generate_chapter, diff --git a/ci-scripts/checkNgapTesterHtmlReport.py b/ci-scripts/checkNgapTesterHtmlReport.py index 2d6eb564441596288112dc39a3100d837168cea0..44b709c9e35876a6f9754aa148ec555c334ec19b 100755 --- a/ci-scripts/checkNgapTesterHtmlReport.py +++ b/ci-scripts/checkNgapTesterHtmlReport.py @@ -26,9 +26,8 @@ import argparse import os import re import sys -import subprocess -from generate_html import ( +from common.python.generate_html import ( generate_header, generate_footer, generate_chapter, diff --git a/ci-scripts/checkOmecGnbsimStatus.py b/ci-scripts/checkOmecGnbsimStatus.py index 556be039dc3b9d32764c674cf287b5021f16a341..ce29fbe287dbd3d845ba464c5cbb1ddd5736dada 100755 --- a/ci-scripts/checkOmecGnbsimStatus.py +++ b/ci-scripts/checkOmecGnbsimStatus.py @@ -27,7 +27,7 @@ import re import sys import time import matplotlib.pyplot as plt -import cls_cmd +import common.python.cls_cmd as cls_cmd logging.basicConfig( level=logging.INFO, diff --git a/ci-scripts/checkTsharkCapture.py b/ci-scripts/checkTsharkCapture.py index 862e36154c5eb36e5ad38f28622e2aec0e7d53fa..a6b819eb7d8c4f636fea7cc56558d1ad13a3f104 100755 --- a/ci-scripts/checkTsharkCapture.py +++ 
b/ci-scripts/checkTsharkCapture.py @@ -26,7 +26,7 @@ import logging import re import sys import time -import cls_cmd +import common.python.cls_cmd as cls_cmd logging.basicConfig( level=logging.DEBUG, diff --git a/ci-scripts/checkTutorialHtmlReport.py b/ci-scripts/checkTutorialHtmlReport.py index 0615d92e6331c272e3ef0125ca24f0f295e45237..08ee14d65916ea7b1d5b378894a09b3126a3de74 100755 --- a/ci-scripts/checkTutorialHtmlReport.py +++ b/ci-scripts/checkTutorialHtmlReport.py @@ -26,9 +26,8 @@ import argparse import os import re import sys -import subprocess -from generate_html import ( +from common.python.generate_html import ( generate_header, generate_footer, generate_chapter, diff --git a/ci-scripts/cls_cmd.py b/ci-scripts/cls_cmd.py deleted file mode 100644 index 22f1a3e913a770508586cdd0a8f4b0f675b1fe04..0000000000000000000000000000000000000000 --- a/ci-scripts/cls_cmd.py +++ /dev/null @@ -1,47 +0,0 @@ -""" -Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -contributor license agreements. See the NOTICE file distributed with -this work for additional information regarding copyright ownership. -The OpenAirInterface Software Alliance licenses this file to You under -the OAI Public License, Version 1.1 (the "License"); you may not use this file -except in compliance with the License. -You may obtain a copy of the License at - - http://www.openairinterface.org/?page_id=698 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. ------------------------------------------------------------------------------- -For more information about the OpenAirInterface (OAI) Software Alliance: - contact@openairinterface.org ---------------------------------------------------------------------- -""" - -import logging -import subprocess as sp - -class LocalCmd(): - def __init__(self, d = None): - self.cwd = d - self.cp = sp.CompletedProcess(args='', returncode=0, stdout='') - - def run(self, line, timeout=300, silent=False, reportNonZero=True): - if not silent: - logging.info(line) - try: - ret = sp.run(line, shell=True, cwd=self.cwd, stdout=sp.PIPE, stderr=sp.STDOUT, timeout=timeout) - except Exception as e: - ret = sp.CompletedProcess(args=line, returncode=255, stdout=f'Exception: {str(e)}'.encode('utf-8')) - if ret.stdout is None: - ret.stdout = b'' - ret.stdout = ret.stdout.decode('utf-8').strip() - if reportNonZero and ret.returncode != 0: - logging.warning(f'command "{line}" returned non-zero returncode {ret.returncode}: output:\n{ret.stdout}') - self.cp = ret - return ret - - def close(self): - pass diff --git a/ci-scripts/common b/ci-scripts/common new file mode 160000 index 0000000000000000000000000000000000000000..dd7612c3b752325394f08b485cebe0bafad8ec15 --- /dev/null +++ b/ci-scripts/common @@ -0,0 +1 @@ +Subproject commit dd7612c3b752325394f08b485cebe0bafad8ec15 diff --git a/ci-scripts/dsTestDeployTools.py b/ci-scripts/dsTestDeployTools.py deleted file mode 100644 index 436c285aca1c06122e7b7a91f4930ce25060a240..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTestDeployTools.py +++ /dev/null @@ -1,241 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. 
See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. -# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. -# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- - -import os -import re -import sys -import subprocess -import time - -CICD_PRIVATE_NETWORK_RANGE='192.168.68.0/26' -CICD_PUBLIC_NETWORK_RANGE='192.168.61.192/26' - -CICD_MYSQL_PUBLIC_ADDR='192.168.61.194' -CICD_AMF_PUBLIC_ADDR='192.168.61.195' -CICD_SMF_PUBLIC_ADDR='192.168.61.196' -CICD_UPF_PUBLIC_ADDR='192.168.61.197' -CICD_DUMMY_SMF_PUBLIC_ADDR='192.168.61.200' - -class deployForDsTester(): - def __init__(self): - self.action = 'None' - self.tag = '' - self.mySqlPassword = '' - - def createNetworks(self): - # first check if already up? - try: - res = subprocess.check_output('docker network ls | egrep -c "cicd-oai-public-net|cicd-oai-private-net"', shell=True, universal_newlines=True) - if int(str(res.strip())) > 0: - self.removeNetworks() - except: - pass - - subprocess_run_w_echo('docker network create --attachable --subnet ' + CICD_PUBLIC_NETWORK_RANGE + ' --ip-range ' + CICD_PUBLIC_NETWORK_RANGE + ' cicd-oai-public-net') - subprocess_run_w_echo('docker network create --attachable --subnet ' + CICD_PRIVATE_NETWORK_RANGE + ' --ip-range ' + CICD_PRIVATE_NETWORK_RANGE + ' cicd-oai-private-net') - - def removeNetworks(self): - try: - subprocess_run_w_echo('docker network rm cicd-oai-public-net cicd-oai-private-net') - except: - pass - - def deployMySqlServer(self): - # first check if already up? If yes, remove everything. 
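The local ci-scripts/cls_cmd.py and the dsTester helper scripts removed by this patch are superseded by the new ci-scripts/common submodule; the surviving scripts now import the shared helpers as common.python.cls_cmd and common.python.generate_html (see the import hunks above). A short usage sketch, assuming the script runs from ci-scripts/ with the submodule checked out and that the submodule keeps the same LocalCmd interface as the deleted local copy ('oc get pods' is just an example command):

import common.python.cls_cmd as cls_cmd  # resolves once ci-scripts/common is initialised

# LocalCmd wraps subprocess.run; run() returns a CompletedProcess whose
# stdout is already decoded and stripped (same behaviour as the removed file).
myCmd = cls_cmd.LocalCmd()
ret = myCmd.run('oc get pods', timeout=60, silent=False, reportNonZero=True)
if ret.returncode == 0:
    print(ret.stdout)
myCmd.close()
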
- try: - res = subprocess.check_output('docker ps -a | grep -c "cicd-mysql-svr"', shell=True, universal_newlines=True) - if int(str(res.strip())) > 0: - self.removeAllContainers() - except: - pass - - cwd = os.getcwd() - if not os.path.isfile(cwd + '/component/oai-amf/build/scripts/oai_db.sql'): - sys.exit(-1) - - subprocess_run_w_echo('docker run --name cicd-mysql-svr --network cicd-oai-public-net --ip ' + CICD_MYSQL_PUBLIC_ADDR + ' -d -e MYSQL_ROOT_PASSWORD=secretPassword mysql/mysql-server:5.7') - subprocess_run_w_echo('docker cp component/oai-amf/build/scripts/oai_db.sql cicd-mysql-svr:/home') - subprocess_run_w_echo('sed -e "s@CICD_AMF_PUBLIC_ADDR@' + CICD_AMF_PUBLIC_ADDR + '@" ci-scripts/mysql-script.cmd > ci-scripts/mysql-complete.cmd') - subprocess_run_w_echo('docker cp ci-scripts/mysql-complete.cmd cicd-mysql-svr:/home') - # waiting for the service to be properly started - time.sleep(5) - doLoop = True - while doLoop: - try: - res = subprocess.check_output('docker logs cicd-mysql-svr 2>&1', shell=True, universal_newlines=True) - startMessageFound = re.search('Starting MySQL', str(res)) - if startMessageFound is not None: - doLoop = False - except: - time.sleep(2) - pass - time.sleep(2) - subprocess_run_w_echo('docker exec cicd-mysql-svr /bin/bash -c "mysql -uroot -psecretPassword < /home/mysql-complete.cmd"') - - def deployAMF(self): - res = '' - # first check if tag exists - try: - res = subprocess.check_output('docker image inspect oai-amf:' + self.tag, shell=True, universal_newlines=True) - except: - sys.exit(-1) - - # check if there is an entrypoint - entrypoint = re.search('entrypoint', str(res)) - if entrypoint is not None: - subprocess_run_w_echo('docker run --privileged --name cicd-oai-amf --network cicd-oai-public-net --ip ' + CICD_AMF_PUBLIC_ADDR + ' -d --entrypoint "/bin/bash" oai-amf:' + self.tag + ' -c "sleep infinity"') - else: - subprocess_run_w_echo('docker run --privileged --name cicd-oai-amf --network cicd-oai-public-net --ip ' + CICD_AMF_PUBLIC_ADDR + ' -d oai-amf:' + self.tag + ' /bin/bash -c "sleep infinity"') - subprocess_run_w_echo('sed -e "s@CI_NGAP_IF_NAME@eth0@" -e "s@CI_N11_IF_NAME@eth0@" -e "s@CI_SMF0_IP_ADDRESS@' + CICD_SMF_PUBLIC_ADDR + '@" -e "s@CI_SMF1_IP_ADDRESS@' + CICD_DUMMY_SMF_PUBLIC_ADDR + '@" -e "s@CI_MYSQL_IP_ADDRESS@' + CICD_MYSQL_PUBLIC_ADDR + '@" ci-scripts/temp/generate_amf_conf.sh > ci-scripts/temp/ci-generate_amf_conf.sh') - subprocess_run_w_echo('docker cp ci-scripts/temp/ci-generate_amf_conf.sh cicd-oai-amf:/openair-amf/generate_amf_conf.sh') - subprocess_run_w_echo('docker exec cicd-oai-amf /bin/bash -c "chmod 755 generate_amf_conf.sh && ./generate_amf_conf.sh" > archives/amf_config.log') - - def deploySMF(self): - res = '' - # first check if tag exists - try: - res = subprocess.check_output('docker image inspect oai-smf:' + self.tag, shell=True, universal_newlines=True) - except: - sys.exit(-1) - - # check if there is an entrypoint - entrypoint = re.search('entrypoint', str(res)) - if entrypoint is not None: - subprocess_run_w_echo('docker run --privileged --name cicd-oai-smf --network cicd-oai-public-net --ip ' + CICD_SMF_PUBLIC_ADDR + ' -d --entrypoint "/bin/bash" oai-smf:' + self.tag + ' -c "sleep infinity"') - else: - subprocess_run_w_echo('docker run --privileged --name cicd-oai-smf --network cicd-oai-public-net --ip ' + CICD_SMF_PUBLIC_ADDR + ' -d oai-smf:' + self.tag + ' /bin/bash -c "sleep infinity"') - subprocess_run_w_echo('sed -e "s@CI_N4_IF_NAME@eth0@" -e "s@CI_SBI_IF_NAME@eth0@" -e "s@CI_AMF_IP_ADDR@' + 
CICD_AMF_PUBLIC_ADDR + '@" -e "s@CI_UPF_IP_ADDR@' + CICD_UPF_PUBLIC_ADDR + '@" ci-scripts/temp/generate_smf_conf.sh > ci-scripts/temp/ci-generate_smf_conf.sh') - subprocess_run_w_echo('docker cp ci-scripts/temp/ci-generate_smf_conf.sh cicd-oai-smf:/openair-smf/generate_smf_conf.sh') - subprocess_run_w_echo('docker exec cicd-oai-smf /bin/bash -c "chmod 755 generate_smf_conf.sh && ./generate_smf_conf.sh" > archives/smf_config.log') - - def deployUPF(self): - res = '' - # first check if tag exists - try: - res = subprocess.check_output('docker image inspect oai-spgwu-tiny:' + self.tag, shell=True, universal_newlines=True) - except: - sys.exit(-1) - - # check if there is an entrypoint - entrypoint = re.search('entrypoint', str(res)) - if entrypoint is not None: - subprocess_run_w_echo('docker run --privileged --name cicd-oai-upf --network cicd-oai-public-net --ip ' + CICD_UPF_PUBLIC_ADDR + ' -d --entrypoint "/bin/bash" oai-spgwu-tiny:' + self.tag + ' -c "sleep infinity"') - else: - subprocess_run_w_echo('docker run --privileged --name cicd-oai-upf --network cicd-oai-public-net --ip ' + CICD_UPF_PUBLIC_ADDR + ' -d oai-spgwu-tiny:' + self.tag + ' /bin/bash -c "sleep infinity"') - subprocess_run_w_echo('python3 ci-scripts/generate_spgwu-tiny_config_script.py --kind=SPGW-U --sxc_ip_addr=' + CICD_SMF_PUBLIC_ADDR + ' --sxu=eth0 --s1u=eth0 --sgi=eth0 --pdn_list="12.0.0.0/24 12.1.0.0/24" --prefix=/openair-spgwu-tiny/etc --from_docker_file') - subprocess_run_w_echo('docker cp ./spgw_u.conf cicd-oai-upf:/openair-spgwu-tiny/etc') - subprocess_run_w_echo('touch archives/spgwu_config.log') - - def removeAllContainers(self): - try: - subprocess_run_w_echo('docker rm -f cicd-mysql-svr cicd-oai-amf cicd-oai-smf cicd-oai-upf') - except: - pass - - -def subprocess_run_w_echo(cmd): - print(cmd) - subprocess.run(cmd, shell=True) - -def Usage(): - print('----------------------------------------------------------------------------------------------------------------------') - print('dsTestDeployTools.py') - print(' Deploy for DsTester in the pipeline.') - print('----------------------------------------------------------------------------------------------------------------------') - print('Usage: python3 dsTestDeployTools.py [options]') - print(' --help Show this help.') - print('---------------------------------------------------------------------------------------------- Mandatory Options -----') - print(' --action={CreateNetworks,RemoveNetworks,...}') - print('-------------------------------------------------------------------------------------------------------- Options -----') - print(' --tag=[Image Tag in registry]') - print('------------------------------------------------------------------------------------------------- Actions Syntax -----') - print('python3 dsTestDeployTools.py --action=CreateNetworks') - print('python3 dsTestDeployTools.py --action=RemoveNetworks') - print('python3 dsTestDeployTools.py --action=DeployMySqlServer') - print('python3 dsTestDeployTools.py --action=DeployAMF --tag=[tag]') - print('python3 dsTestDeployTools.py --action=DeploySMF --tag=[tag]') - print('python3 dsTestDeployTools.py --action=DeployUPF --tag=[tag]') - print('python3 dsTestDeployTools.py --action=RemoveAllContainers') - -#-------------------------------------------------------------------------------------------------------- -# -# Start of main -# -#-------------------------------------------------------------------------------------------------------- - -DFDT = deployForDsTester() - -argvs = sys.argv -argc = 
len(argvs) - -while len(argvs) > 1: - myArgv = argvs.pop(1) - if re.match('^\-\-help$', myArgv, re.IGNORECASE): - Usage() - sys.exit(0) - elif re.match('^\-\-action=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-action=(.+)$', myArgv, re.IGNORECASE) - action = matchReg.group(1) - if action != 'CreateNetworks' and \ - action != 'RemoveNetworks' and \ - action != 'DeployMySqlServer' and \ - action != 'DeployAMF' and \ - action != 'DeploySMF' and \ - action != 'DeployUPF' and \ - action != 'RemoveAllContainers': - print('Unsupported Action => ' + action) - Usage() - sys.exit(-1) - DFDT.action = action - elif re.match('^\-\-tag=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-tag=(.+)$', myArgv, re.IGNORECASE) - DFDT.tag = matchReg.group(1) - -if DFDT.action == 'CreateNetworks': - DFDT.createNetworks() -elif DFDT.action == 'RemoveNetworks': - DFDT.removeNetworks() -elif DFDT.action == 'DeployMySqlServer': - DFDT.deployMySqlServer() -elif DFDT.action == 'DeployAMF': - if DFDT.tag == '': - print('Missing OAI-AMF image tag') - Usage() - sys.exit(-1) - DFDT.deployAMF() -elif DFDT.action == 'DeploySMF': - if DFDT.tag == '': - print('Missing OAI-SMF image tag') - Usage() - sys.exit(-1) - DFDT.deploySMF() -elif DFDT.action == 'DeployUPF': - if DFDT.tag == '': - print('Missing OAI-UPF image tag') - Usage() - sys.exit(-1) - DFDT.deployUPF() -elif DFDT.action == 'RemoveAllContainers': - DFDT.removeAllContainers() - -sys.exit(0) - diff --git a/ci-scripts/dsTestGenerateHTMLReport.py b/ci-scripts/dsTestGenerateHTMLReport.py deleted file mode 100644 index 5586ffe7ad835662bffdae5fd7d694c68d424db9..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTestGenerateHTMLReport.py +++ /dev/null @@ -1,449 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. -# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. 
-# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- - -import os -import re -import sys -import subprocess -import yaml -import argparse - - -class HtmlReport(): - def __init__(self): - self.job_name = '' - self.job_id = '' - self.job_url = '' - self.job_start_time = 'TEMPLATE_TIME' - self.type = '' - self.file_name = '' - self.path = '' - self.containers = 0 - - def _parse_args(self) -> argparse.Namespace: - """Parse the command line args - - Returns: - argparse.Namespace: the created parser - """ - parser = argparse.ArgumentParser(description='OAI HTML Report Generation for CI') - - # Jenkins Job name - parser.add_argument( - '--job_name', - action='store', - required=True, - help='Jenkins Job name', - ) - # Jenkins Job Build ID - parser.add_argument( - '--job_id', - action='store', - required=True, - help='Jenkins Job Build ID', - ) - # Jenkins Job Build URL - parser.add_argument( - '--job_url', - action='store', - required=True, - help='Jenkins Job Build URL', - ) - # Type - parser.add_argument( - '--type', - action='store', - required=True, - choices=['mini', 'basic', 'slice'], - help='Type of function', - ) - return parser.parse_args() - - def generate(self): - cwd = os.getcwd() - self.file = open(cwd + f'{self.file_name}', 'w') - self.generateHeader() - self.deploymentSummaryHeader() - finalStatus = self.testSummaryHeader() - self.testSummaryDetails() - self.testSummaryFooter() - - self.generateFooter() - self.file.close() - - if finalStatus: - sys.exit(0) - else: - print("DS-TESTER testing FAILED") - - def generateHeader(self): - # HTML Header - self.file.write('<!DOCTYPE html>\n') - self.file.write('<html class="no-js" lang="en-US">\n') - self.file.write('<head>\n') - self.file.write(' <meta name="viewport" content="width=device-width, initial-scale=1">\n') - self.file.write(' <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css">\n') - self.file.write(' <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>\n') - self.file.write(' <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>\n') - self.file.write(' <title>OAI 5G Core Network Test Results for ' + self.job_name + ' job build #' + self.job_id + '</title>\n') - self.file.write('</head>\n') - self.file.write('<body><div class="container">\n') - self.file.write(' <table width = "100%" style="border-collapse: collapse; border: none;">\n') - self.file.write(' <tr style="border-collapse: collapse; border: none;">\n') - self.file.write(' <td style="border-collapse: collapse; border: none;">\n') - self.file.write(' <a href="http://www.openairinterface.org/">\n') - self.file.write(' <img src="http://www.openairinterface.org/wp-content/uploads/2016/03/cropped-oai_final_logo2.png" alt="" border="none" height=50 width=150>\n') - self.file.write(' </img>\n') - self.file.write(' </a>\n') - self.file.write(' </td>\n') - self.file.write(' <td style="border-collapse: collapse; border: none; vertical-align: center;">\n') - self.file.write(' <b><font size = "6">Job Summary -- Job: ' + self.job_name + ' -- Build-ID: <a href="' + self.job_url + '">' + self.job_id + '</a></font></b>\n') - self.file.write(' </td>\n') - self.file.write(' </tr>\n') - self.file.write(' </table>\n') - self.file.write(' <br>\n') - - 
def generateFooter(self): - self.file.write(' <div class="well well-lg">End of Test Report -- Copyright <span class="glyphicon glyphicon-copyright-mark"></span> 2020 <a href="http://www.openairinterface.org/">OpenAirInterface</a>. All Rights Reserved.</div>\n') - self.file.write('</div></body>\n') - self.file.write('</html>\n') - - def deploymentSummaryHeader(self): - self.file.write(' <h2>Deployment Summary</h2>\n') - passcount = 0 - failcount = 0 - restarted = 0 - cwd = os.getcwd() - if os.path.isfile(cwd + f'{self.path}'): - with open(cwd + f'{self.path}') as f: - data = yaml.full_load(f) - try: - passcount = len(data['nf-deployment']['pass']) - failcount = len(data['nf-deployment']['fail']) - except Exception as e: - pass - if passcount == self.containers: - self.file.write(' <div class="alert alert-success">\n') - self.file.write(' <strong>Successful Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - - elif failcount > 0: - self.file.write(' <div class="alert alert-danger">\n') - self.file.write(' <strong>Failed Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-warning">\n') - self.file.write(' <strong>Partial Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-warning">\n') - self.file.write(' <strong>LogFile not available! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - self.file.write(' <br>\n') - self.file.write(' <button data-toggle="collapse" data-target="#deployment-details">More details on Deployment</button>\n') - self.file.write(' <br>\n') - self.file.write(' <div id="deployment-details" class="collapse">\n') - self.file.write(' <br>\n') - self.file.write(' <table class="table-bordered" width = "80%" align = "center" border = 1>\n') - self.file.write(' <tr bgcolor = "#33CCFF" >\n') - self.file.write(' <th>Container Name</th>\n') - self.file.write(' <th>Used Image Tag</th>\n') - self.file.write(' <th>Image Creation Date</th>\n') - self.file.write(' <th>Used Image Size</th>\n') - self.file.write(' <th>Configuration Status</th>\n') - self.file.write(' </tr>\n') - self.addImageRow('mysql') - self.addImageRow('oai_amf') - if self.type == 'basic' or self.type == 'mini': - self.addImageRow('oai_nrf') - self.addImageRow('oai_smf') - self.addImageRow('oai_spgwu') - if self.type == 'basic' or self.type == 'slice': - self.addImageRow('oai_ausf') - self.addImageRow('oai_udm') - self.addImageRow('oai_udr') - if self.type == 'slice': - self.addImageRow('oai_nssf') - self.addImageRow('oai_nrf_1') - self.addImageRow('oai_nrf_2') - self.addImageRow('oai_smf_1') - self.addImageRow('oai_smf_2') - self.addImageRow('oai_upf_1') - self.addImageRow('oai_upf_2') - self.file.write(' </table>\n') - self.file.write(' </div>\n') - - def addImageRow(self, prefix): - cwd = os.getcwd() - if prefix == 'oai_nrf_1': - imageInfoPrefix = 'oai_nrf' - containerName = 'oai-nrf_1' - tagPattern = 'OAI_NRF_TAG' - statusPrefix = 'cicd-oai-nrf_1' - if prefix == 'oai_nrf_2': - imageInfoPrefix = 'oai_nrf' - containerName = 'oai-nrf_2' - tagPattern = 'OAI_NRF_TAG' - statusPrefix = 'cicd-oai-nrf_2' - if prefix == 'oai_smf_1': - imageInfoPrefix = 'oai_smf' - containerName = 'oai-smf_1' - tagPattern = 'OAI_SMF_TAG' - statusPrefix = 'cicd-oai-smf_1' - if prefix == 'oai_smf_2': - 
imageInfoPrefix = 'oai_smf' - containerName = 'oai-smf_2' - tagPattern = 'OAI_SMF_TAG' - statusPrefix = 'cicd-oai-smf_2' - if prefix == 'oai_upf_1': - imageInfoPrefix = 'oai_spgwu' - containerName = 'oai-spgwu-tiny_1' - tagPattern = 'OAI_SPGWU_TAG' - statusPrefix = 'cicd-oai-upf_1' - if prefix == 'oai_upf_2': - imageInfoPrefix = 'oai_spgwu' - containerName = 'oai-spgwu-tiny_2' - tagPattern = 'OAI_SPGWU_TAG' - statusPrefix = 'cicd-oai-upf_2' - if prefix == 'oai_amf': - imageInfoPrefix = 'oai_amf' - containerName = 'oai-amf' - tagPattern = 'OAI_AMF_TAG' - statusPrefix = 'cicd-oai-amf' - if prefix == 'oai_smf': - imageInfoPrefix = 'oai_smf' - containerName = 'oai-smf' - tagPattern = 'OAI_SMF_TAG' - statusPrefix = 'cicd-oai-smf' - if prefix == 'oai_nrf': - imageInfoPrefix = 'oai_nrf' - containerName = 'oai-nrf' - tagPattern = 'OAI_NRF_TAG' - statusPrefix = 'cicd-oai-nrf' - if prefix == 'oai_spgwu': - imageInfoPrefix = 'oai_spgwu' - containerName = 'oai-spgwu-tiny' - tagPattern = 'OAI_SPGWU_TAG' - statusPrefix = 'cicd-oai-upf' - if prefix == 'oai_ausf': - imageInfoPrefix = 'oai_ausf' - containerName = 'oai-ausf' - tagPattern = 'OAI_AUSF_TAG' - statusPrefix = 'cicd-oai-ausf' - if prefix == 'oai_udm': - imageInfoPrefix = 'oai_udm' - containerName = 'oai-udm' - tagPattern = 'OAI_UDM_TAG' - statusPrefix = 'cicd-oai-udm' - if prefix == 'oai_udr': - imageInfoPrefix = 'oai_udr' - containerName = 'oai-udr' - tagPattern = 'OAI_UDR_TAG' - statusPrefix = 'cicd-oai-udr' - if prefix == 'oai_nssf': - imageInfoPrefix = 'oai_nssf' - containerName = 'oai-nssf' - tagPattern = 'OAI_NSSF_TAG' - statusPrefix = 'cicd-oai-nssf' - if prefix == 'mysql': - imageInfoPrefix = 'mysql' - containerName = imageInfoPrefix - tagPattern = 'N/A' - statusPrefix = 'cicd-mysql-svr' - if os.path.isfile(cwd + '/archives/' + imageInfoPrefix + '_image_info.log'): - usedTag = '' - createDate = '' - size = '' - with open(cwd + '/archives/' + imageInfoPrefix + '_image_info.log') as imageLog: - for line in imageLog: - line = line.strip() - result = re.search(tagPattern + ': (?P<tag>[a-zA-Z0-9\-\_:]+)', line) - if result is not None: - usedTag = result.group('tag') - result = re.search('Date = (?P<date>[a-zA-Z0-9\-\_:]+)', line) - if result is not None: - createDate = result.group('date') - result = re.search('Size = (?P<size>[0-9]+) bytes', line) - if result is not None: - sizeInt = int(result.group('size')) - if sizeInt < 1000000: - sizeInt = int(sizeInt / 1000) - size = str(sizeInt) + ' kB' - else: - sizeInt = int(sizeInt / 1000000) - size = str(sizeInt) + ' MB' - imageLog.close() - configState = 'KO' - if os.path.isfile(cwd + f'{self.path}'): - with open(cwd + f'{self.path}') as f: - data = yaml.full_load(f) - try: - if statusPrefix in data['nf-deployment']['pass']: - configState = 'OK' - elif statusPrefix in data['nf-deployment']['recovered']: - configState = 'RS' - except Exception as e: - pass - self.file.write(' <tr>\n') - self.file.write(' <td>' + containerName + '</td>\n') - self.file.write(' <td>' + usedTag + '</td>\n') - self.file.write(' <td>' + createDate + '</td>\n') - self.file.write(' <td>' + size + '</td>\n') - if configState == 'OK': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">' + configState + '</font></b></td>\n') - elif configState == 'RS': - self.file.write(' <td bgcolor = "Orange"><b><font color="white">' + configState + '</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "Red"><b><font color="white">' + configState + '</font></b></td>\n') - self.file.write(' </tr>\n') - else: - if 
imageInfoPrefix == 'mysql': - if os.path.isfile(cwd + f'{self.path}'): - self.file.write(' <tr>\n') - self.file.write(' <td>' + containerName + '</td>\n') - self.file.write(' <td>mysql:8.0</td>\n') - self.file.write(' <td>N/A</td>\n') - self.file.write(' <td>449MB</td>\n') - configState = 'KO' - with open(cwd + f'{self.path}') as f: - data = yaml.full_load(f) - try: - if statusPrefix in data['nf-deployment']['pass']: - configState = 'OK' - elif statusPrefix in data['nf-deployment']['recovered']: - configState = 'RS' - except Exception as e: - pass - if configState == 'OK': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">OK</font></b></td>\n') - elif configState == 'RS': - self.file.write(' <td bgcolor = "Orange"><b><font color="white">' + configState + '</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "Red"><b><font color="white">KO</font></b></td>\n') - self.file.write(' </tr>\n') - else: - self.file.write(' <tr>\n') - self.file.write(' <td>' + containerName + '</td>\n') - self.file.write(' <td>UNKNOWN</td>\n') - self.file.write(' <td>N/A</td>\n') - self.file.write(' <td>N/A</td>\n') - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">UNKNOW</font></b></td>\n') - self.file.write(' </tr>\n') - - def testSummaryHeader(self): - self.file.write(' <h2>DS Tester Summary</h2>\n') - cwd = os.getcwd() - finalStatusOK = False - if os.path.isfile(cwd + f'{self.path}'): - cmd = f'egrep -c "final-result: pass" {cwd}{self.path} || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - finalStatusOK = True - if finalStatusOK: - self.file.write(' <div class="alert alert-success">\n') - self.file.write(' <strong>Successful DsTester suite! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-danger">\n') - self.file.write(' <strong>Failed DsTester suite! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-warning">\n') - self.file.write(' <strong>LogFile not available! 
<span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - return finalStatusOK - - def testSummaryFooter(self): - self.file.write(' <br>\n') - - def testSummaryDetails(self): - self.file.write(' <br>\n') - self.file.write(' <button data-toggle="collapse" data-target="#ds-tester-details">More details on DsTester results</button>\n') - self.file.write(' <div id="ds-tester-details" class="collapse">\n') - self.file.write(' <table class="table-bordered" width = "60%" align = "center" border = 1>\n') - self.file.write(' <tr bgcolor = "#33CCFF" >\n') - self.file.write(' <th>Test Name</th>\n') - self.file.write(' <th>Test Status</th>\n') - self.file.write(' <th>Test Details</th>\n') - self.file.write(' </tr>\n') - cwd = os.getcwd() - if os.path.isfile(cwd + f'{self.path}'): - with open(cwd + f'{self.path}') as f: - data = yaml.full_load(f) - nScenarios = len(data['scenarios']) - for scenario in range(nScenarios): - self.file.write(' <tr>\n') - self.file.write(' <td>' + data['scenarios'][scenario]['name'] + '</td>\n') - if data['scenarios'][scenario]['result'] == 'fail': - self.file.write(' <td bgcolor = "Red"><b><font color="white">KO</font></b></td>\n') - elif data['scenarios'][scenario]['result'] == 'pass': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">OK</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">UNKNOW</font></b></td>\n') - testDetails = '' - try: - if data['scenarios'][scenario]['conditions']['om_conditions'] and data['scenarios'][scenario]['conditions']['pcap_test']: - testDetails += 'Conditions: \n' - for x,y in data['scenarios'][scenario]['conditions']['om_conditions'].items(): - testDetails += ' ' + str(x) + ': ' + str(y) + '\n' - testDetails += '\npcap_test: \n' - for x,y in data['scenarios'][scenario]['conditions']['pcap_test'].items(): - testDetails += ' ' + str(x) + ': \n' - testDetails += ' ' + str(y) + '\n' - except Exception as e: - for x,y in data['scenarios'][scenario]['conditions'].items(): - testDetails += str(x) + ': ' + str(y) + '\n' - #details += '\n' - self.file.write(' <td><pre>' + testDetails + '</pre></td>\n') - self.file.write(' </tr>\n') - else: - print ('no details???') - self.file.write(' </table>\n') - self.file.write(' </div>\n') - -#-------------------------------------------------------------------------------------------------------- -# -# Start of main -# -#-------------------------------------------------------------------------------------------------------- - -HTML = HtmlReport() -args = HTML._parse_args() - -HTML.job_name = args.job_name -HTML.job_id = args.job_id -HTML.job_url = args.job_url -HTML.type = args.type -if HTML.type == 'mini': - HTML.path = '/RESULTS-MINI/mvc.yaml' - HTML.file_name = '/test_results_oai_cn5g_mini.html' - HTML.containers = 5 -elif HTML.type == 'basic': - HTML.path = '/RESULTS-BASIC/bvc.yaml' - HTML.file_name = '/test_results_oai_cn5g_basic.html' - HTML.containers = 8 -elif HTML.type == 'slice': - HTML.path = '/RESULTS-SLICE/slice.yaml' - HTML.file_name = '/test_results_oai_cn5g_slice.html' - HTML.containers = 12 -HTML.generate() diff --git a/ci-scripts/dsTestGenerateHTMLReport1.py b/ci-scripts/dsTestGenerateHTMLReport1.py deleted file mode 100644 index c66437f4fbb68a6928d92b805900f0ed8139da90..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTestGenerateHTMLReport1.py +++ /dev/null @@ -1,354 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor 
license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. -# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. -# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- - -import os -import re -import sys -import subprocess -import yaml - -class HtmlReport(): - def __init__(self): - self.job_name = '' - self.job_id = '' - self.job_url = '' - self.job_start_time = 'TEMPLATE_TIME' - - def generate(self): - cwd = os.getcwd() - self.file = open(cwd + '/test_results_oai_cn5g_oc.html', 'w') - self.generateHeader() - self.deploymentSummaryHeader() - finalStatus = self.testSummaryHeader() - self.testSummaryDetails() - self.testSummaryFooter() - - self.generateFooter() - self.file.close() - - if finalStatus: - sys.exit(0) - else: - print("DS-TESTER testing FAILED") - - def generateHeader(self): - # HTML Header - self.file.write('<!DOCTYPE html>\n') - self.file.write('<html class="no-js" lang="en-US">\n') - self.file.write('<head>\n') - self.file.write(' <meta name="viewport" content="width=device-width, initial-scale=1">\n') - self.file.write(' <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css">\n') - self.file.write(' <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>\n') - self.file.write(' <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>\n') - self.file.write(' <title>OAI 5G Core Network Test Results for ' + self.job_name + ' job build #' + self.job_id + '</title>\n') - self.file.write('</head>\n') - self.file.write('<body><div class="container">\n') - self.file.write(' <table width = "100%" style="border-collapse: collapse; border: none;">\n') - self.file.write(' <tr style="border-collapse: collapse; border: none;">\n') - self.file.write(' <td style="border-collapse: collapse; border: none;">\n') - self.file.write(' <a href="http://www.openairinterface.org/">\n') - self.file.write(' <img src="http://www.openairinterface.org/wp-content/uploads/2016/03/cropped-oai_final_logo2.png" alt="" border="none" height=50 width=150>\n') - self.file.write(' </img>\n') - self.file.write(' </a>\n') - self.file.write(' </td>\n') - self.file.write(' <td style="border-collapse: collapse; border: none; vertical-align: center;">\n') - self.file.write(' <b><font size = "6">Job Summary -- Job: ' + self.job_name + ' -- Build-ID: <a href="' + self.job_url + '">' + self.job_id + '</a></font></b>\n') - self.file.write(' </td>\n') - self.file.write(' </tr>\n') - self.file.write(' </table>\n') - self.file.write(' <br>\n') - - def generateFooter(self): - self.file.write(' <div class="well well-lg">End of Test Report -- Copyright <span 
class="glyphicon glyphicon-copyright-mark"></span> 2020 <a href="http://www.openairinterface.org/">OpenAirInterface</a>. All Rights Reserved.</div>\n') - self.file.write('</div></body>\n') - self.file.write('</html>\n') - - def deploymentSummaryHeader(self): - self.file.write(' <h2>Deployment Summary</h2>\n') - cwd = os.getcwd() - if os.path.isfile(cwd + '/archives/deployment_status.log'): - cmd = 'egrep -c "DEPLOYMENT: OK" archives/deployment_status.log || true' - status = False - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - status = True - if status: - self.file.write(' <div class="alert alert-success">\n') - self.file.write(' <strong>Successful Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-danger">\n') - self.file.write(' <strong>Failed Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-warning">\n') - self.file.write(' <strong>LogFile not available! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - self.file.write(' <br>\n') - self.file.write(' <button data-toggle="collapse" data-target="#deployment-details">More details on Deployment</button>\n') - self.file.write(' <br>\n') - self.file.write(' <div id="deployment-details" class="collapse">\n') - self.file.write(' <br>\n') - self.file.write(' <table class="table-bordered" width = "80%" align = "center" border = 1>\n') - self.file.write(' <tr bgcolor = "#33CCFF" >\n') - self.file.write(' <th>Container Name</th>\n') - self.file.write(' <th>Used Image Tag</th>\n') - self.file.write(' <th>Image Creation Date</th>\n') - self.file.write(' <th>Used Image Size</th>\n') - self.file.write(' <th>Helm Status</th>\n') - self.file.write(' <th>Pod Status</th>\n') - self.file.write(' </tr>\n') - self.addImageRow('mysql') - self.addImageRow('oai-nrf') - self.addImageRow('oai-udr') - self.addImageRow('oai-udm') - self.addImageRow('oai-ausf') - self.addImageRow('oai-amf') - self.addImageRow('oai-smf') - self.addImageRow('oai-spgwu-tiny') - self.file.write(' </table>\n') - self.file.write(' </div>\n') - - def addImageRow(self, imageName): - cwd = os.getcwd() - if os.path.isfile(cwd + '/archives/' + imageName + '_image_info.log'): - usedTag = '' - createDate = '' - size = '' - with open(cwd + '/archives/' + imageName + '_image_info.log') as imageLog: - for line in imageLog: - line = line.strip() - result = re.search('IMAGENAME_TAG: (?P<tag>[a-zA-Z0-9\-\_:]+)', line) - if result is not None: - usedTag = result.group('tag') - result = re.search('Date = (?P<date>[a-zA-Z0-9\-\_:]+)', line) - if result is not None: - createDate = result.group('date') - result = re.search('Size = (?P<size>[0-9]+) bytes', line) - if result is not None: - sizeInt = int(result.group('size')) - if sizeInt < 1000000: - sizeInt = int(sizeInt / 1000) - size = str(sizeInt) + ' kB' - else: - sizeInt = int(sizeInt / 1000000) - size = str(sizeInt) + ' MB' - imageLog.close() - helmState = 'UNKNOW' - podState = 'UNKNOW' - cmd = f'egrep -c "{imageName}: HELM OK" archives/5gcn_helm_summary.txt || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - helmState = 'OK' - cmd = f'egrep -c "{imageName}: HELM KO" 
archives/5gcn_helm_summary.txt || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - helmState = 'KO' - cmd = f'egrep -c "{imageName}: POD OK" archives/5gcn_pods_summary.txt || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - podState = 'OK' - cmd = f'egrep -c "{imageName}: POD KO" archives/5gcn_pods_summary.txt || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - podState = 'KO' - self.file.write(' <tr>\n') - self.file.write(' <td>' + imageName + '</td>\n') - self.file.write(' <td>' + usedTag + '</td>\n') - self.file.write(' <td>' + createDate + '</td>\n') - self.file.write(' <td>' + size + '</td>\n') - if helmState == 'OK': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">' + helmState + '</font></b></td>\n') - elif helmState == 'KO': - self.file.write(' <td bgcolor = "Red"><b><font color="white">' + helmState + '</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">' + helmState + '</font></b></td>\n') - if podState == 'OK': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">' + podState + '</font></b></td>\n') - elif podState == 'KO': - self.file.write(' <td bgcolor = "Red"><b><font color="white">' + podState + '</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">' + podState + '</font></b></td>\n') - self.file.write(' </tr>\n') - else: - if imageName == 'mysql': - self.file.write(' <tr>\n') - self.file.write(' <td>mysql</td>\n') - self.file.write(' <td>mysql:8.0</td>\n') - self.file.write(' <td>N/A</td>\n') - self.file.write(' <td>449MB</td>\n') - helmState = 'UNKNOW' - podState = 'UNKNOW' - cmd = f'egrep -c "{imageName}: HELM OK" archives/5gcn_helm_summary.txt || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - helmState = 'OK' - cmd = f'egrep -c "{imageName}: HELM KO" archives/5gcn_helm_summary.txt || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - helmState = 'KO' - cmd = f'egrep -c "{imageName}: POD OK" archives/5gcn_pods_summary.txt || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - podState = 'OK' - cmd = f'egrep -c "{imageName}: POD KO" archives/5gcn_pods_summary.txt || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - podState = 'KO' - if helmState == 'OK': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">' + helmState + '</font></b></td>\n') - elif helmState == 'KO': - self.file.write(' <td bgcolor = "Red"><b><font color="white">' + helmState + '</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">' + helmState + '</font></b></td>\n') - if podState == 'OK': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">' + podState + '</font></b></td>\n') - elif podState == 'KO': - self.file.write(' <td bgcolor = "Red"><b><font color="white">' + podState + '</font></b></td>\n') - 
else: - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">' + podState + '</font></b></td>\n') - self.file.write(' </tr>\n') - else: - self.file.write(' <tr>\n') - self.file.write(' <td>' + imageName + '</td>\n') - self.file.write(' <td>UNKNOWN</td>\n') - self.file.write(' <td>N/A</td>\n') - self.file.write(' <td>N/A</td>\n') - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">UNKNOW</font></b></td>\n') - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">UNKNOW</font></b></td>\n') - self.file.write(' </tr>\n') - - def testSummaryHeader(self): - self.file.write(' <h2>DS Tester Summary</h2>\n') - cwd = os.getcwd() - finalStatusOK = False - if os.path.isfile(cwd + '/DS-TEST-RESULTS/hcintegration.yaml'): - cmd = f'egrep -c "final-result: pass" DS-TEST-RESULTS/hcintegration.yaml || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - finalStatusOK = True - if finalStatusOK: - self.file.write(' <div class="alert alert-success">\n') - self.file.write(' <strong>Successful DsTester suite! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-danger">\n') - self.file.write(' <strong>Failed DsTester suite! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-warning">\n') - self.file.write(' <strong>LogFile not available! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - return finalStatusOK - - def testSummaryFooter(self): - self.file.write(' <br>\n') - - def testSummaryDetails(self): - self.file.write(' <br>\n') - self.file.write(' <button data-toggle="collapse" data-target="#ds-tester-details">More details on DsTester results</button>\n') - self.file.write(' <div id="ds-tester-details" class="collapse">\n') - self.file.write(' <table class="table-bordered" width = "60%" align = "center" border = 1>\n') - self.file.write(' <tr bgcolor = "#33CCFF" >\n') - self.file.write(' <th>Test Name</th>\n') - self.file.write(' <th>Test Status</th>\n') - self.file.write(' <th>Test Details</th>\n') - self.file.write(' </tr>\n') - cwd = os.getcwd() - if os.path.isfile(cwd + '/DS-TEST-RESULTS/hcintegration.yaml'): - with open(cwd + '/DS-TEST-RESULTS/hcintegration.yaml') as f: - data = yaml.load(f) - nScenarios = len(data['scenarios']) - for scenario in range(nScenarios): - self.file.write(' <tr>\n') - self.file.write(' <td>' + data['scenarios'][scenario]['name'] + '</td>\n') - if data['scenarios'][scenario]['result'] == 'fail': - self.file.write(' <td bgcolor = "Red"><b><font color="white">KO</font></b></td>\n') - elif data['scenarios'][scenario]['result'] == 'pass': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">OK</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">UNKNOW</font></b></td>\n') - testDetails = '' - for x,y in data['scenarios'][scenario]['conditions'].items(): - testDetails += str(x) + ': ' + str(y) + '\n' - #details += '\n' - self.file.write(' <td><pre>' + testDetails + '</pre></td>\n') - self.file.write(' </tr>\n') - else: - print ('no details???') - self.file.write(' </table>\n') - self.file.write(' </div>\n') - -def Usage(): - 
print('----------------------------------------------------------------------------------------------------------------------') - print('dsTestGenerateHTMLReport.py') - print(' Generate an HTML report for the Jenkins pipeline on oai-cn5g-fed.') - print('----------------------------------------------------------------------------------------------------------------------') - print('Usage: python3 generateHtmlReport.py [options]') - print(' --help Show this help.') - print('---------------------------------------------------------------------------------------------- Mandatory Options -----') - print(' --job_name=[Jenkins Job name]') - print(' --job_id=[Jenkins Job Build ID]') - print(' --job_url=[Jenkins Job Build URL]') - -#-------------------------------------------------------------------------------------------------------- -# -# Start of main -# -#-------------------------------------------------------------------------------------------------------- - -argvs = sys.argv -argc = len(argvs) - -HTML = HtmlReport() - -while len(argvs) > 1: - myArgv = argvs.pop(1) - if re.match('^\-\-help$', myArgv, re.IGNORECASE): - Usage() - sys.exit(0) - elif re.match('^\-\-job_name=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-job_name=(.+)$', myArgv, re.IGNORECASE) - HTML.job_name = matchReg.group(1) - elif re.match('^\-\-job_id=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-job_id=(.+)$', myArgv, re.IGNORECASE) - HTML.job_id = matchReg.group(1) - elif re.match('^\-\-job_url=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-job_url=(.+)$', myArgv, re.IGNORECASE) - HTML.job_url = matchReg.group(1) - else: - sys.exit('Invalid Parameter: ' + myArgv) - -if HTML.job_name == '' or HTML.job_id == '' or HTML.job_url == '': - sys.exit('Missing Parameter in job description') - -HTML.generate() diff --git a/ci-scripts/dsTestGenerateHTMLReport2.py b/ci-scripts/dsTestGenerateHTMLReport2.py deleted file mode 100644 index 8a92642ef875aa26df82a2e9e31ecb6715ede9ff..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTestGenerateHTMLReport2.py +++ /dev/null @@ -1,357 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. -# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. 
-# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- - -import os -import re -import sys -import subprocess -import yaml -import argparse - - -class HtmlReport(): - def __init__(self): - self.job_name = '' - self.job_id = '' - self.job_url = '' - self.job_start_time = 'TEMPLATE_TIME' - - def _parse_args(self) -> argparse.Namespace: - """Parse the command line args - - Returns: - argparse.Namespace: the created parser - """ - parser = argparse.ArgumentParser(description='OAI HTML Report Generation for CI') - - # Jenkins Job name - parser.add_argument( - '--job_name', - action='store', - required=True, - help='Jenkins Job name', - ) - # Jenkins Job Build ID - parser.add_argument( - '--job_id', - action='store', - required=True, - help='Jenkins Job Build ID', - ) - # Jenkins Job Build URL - parser.add_argument( - '--job_url', - action='store', - required=True, - help='Jenkins Job Build URL', - ) - return parser.parse_args() - - def generate(self): - cwd = os.getcwd() - self.file = open(cwd + '/test_results_oai_cn5g_mini.html', 'w') - self.generateHeader() - self.deploymentSummaryHeader() - finalStatus = self.testSummaryHeader() - self.testSummaryDetails() - self.testSummaryFooter() - - self.generateFooter() - self.file.close() - - if finalStatus: - sys.exit(0) - else: - print("DS-TESTER testing FAILED") - - def generateHeader(self): - # HTML Header - self.file.write('<!DOCTYPE html>\n') - self.file.write('<html class="no-js" lang="en-US">\n') - self.file.write('<head>\n') - self.file.write(' <meta name="viewport" content="width=device-width, initial-scale=1">\n') - self.file.write(' <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css">\n') - self.file.write(' <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>\n') - self.file.write(' <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>\n') - self.file.write(' <title>OAI 5G Core Network Test Results for ' + self.job_name + ' job build #' + self.job_id + '</title>\n') - self.file.write('</head>\n') - self.file.write('<body><div class="container">\n') - self.file.write(' <table width = "100%" style="border-collapse: collapse; border: none;">\n') - self.file.write(' <tr style="border-collapse: collapse; border: none;">\n') - self.file.write(' <td style="border-collapse: collapse; border: none;">\n') - self.file.write(' <a href="http://www.openairinterface.org/">\n') - self.file.write(' <img src="http://www.openairinterface.org/wp-content/uploads/2016/03/cropped-oai_final_logo2.png" alt="" border="none" height=50 width=150>\n') - self.file.write(' </img>\n') - self.file.write(' </a>\n') - self.file.write(' </td>\n') - self.file.write(' <td style="border-collapse: collapse; border: none; vertical-align: center;">\n') - self.file.write(' <b><font size = "6">Job Summary -- Job: ' + self.job_name + ' -- Build-ID: <a href="' + self.job_url + '">' + self.job_id + '</a></font></b>\n') - self.file.write(' </td>\n') - self.file.write(' </tr>\n') - self.file.write(' </table>\n') - self.file.write(' <br>\n') - - def generateFooter(self): - self.file.write(' <div class="well well-lg">End of Test Report -- Copyright <span class="glyphicon glyphicon-copyright-mark"></span> 2020 <a 
href="http://www.openairinterface.org/">OpenAirInterface</a>. All Rights Reserved.</div>\n') - self.file.write('</div></body>\n') - self.file.write('</html>\n') - - def deploymentSummaryHeader(self): - self.file.write(' <h2>Deployment Summary</h2>\n') - passcount = 0 - failcount = 0 - restarted = 0 - cwd = os.getcwd() - if os.path.isfile(cwd + '/DS-TEST-RESULTS/mvc.yaml'): - with open(cwd + '/DS-TEST-RESULTS/mvc.yaml') as f: - data = yaml.full_load(f) - try: - passcount = len(data['nf-deployment']['pass']) - failcount = len(data['nf-deployment']['fail']) - except Exception as e: - pass - if passcount == 5: - self.file.write(' <div class="alert alert-success">\n') - self.file.write(' <strong>Successful Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - - elif failcount > 0: - self.file.write(' <div class="alert alert-danger">\n') - self.file.write(' <strong>Failed Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-warning">\n') - self.file.write(' <strong>Partial Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-warning">\n') - self.file.write(' <strong>LogFile not available! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - self.file.write(' <br>\n') - self.file.write(' <button data-toggle="collapse" data-target="#deployment-details">More details on Deployment</button>\n') - self.file.write(' <br>\n') - self.file.write(' <div id="deployment-details" class="collapse">\n') - self.file.write(' <br>\n') - self.file.write(' <table class="table-bordered" width = "80%" align = "center" border = 1>\n') - self.file.write(' <tr bgcolor = "#33CCFF" >\n') - self.file.write(' <th>Container Name</th>\n') - self.file.write(' <th>Used Image Tag</th>\n') - self.file.write(' <th>Image Creation Date</th>\n') - self.file.write(' <th>Used Image Size</th>\n') - self.file.write(' <th>Configuration Status</th>\n') - self.file.write(' </tr>\n') - self.addImageRow('mysql') - self.addImageRow('oai_nrf') - self.addImageRow('oai_amf') - self.addImageRow('oai_smf') - self.addImageRow('oai_spgwu') - self.file.write(' </table>\n') - self.file.write(' </div>\n') - - def addImageRow(self, imageInfoPrefix): - cwd = os.getcwd() - if imageInfoPrefix == 'oai_amf': - containerName = 'oai-amf' - tagPattern = 'OAI_AMF_TAG' - statusPrefix = 'cicd-oai-amf' - if imageInfoPrefix == 'oai_smf': - containerName = 'oai-smf' - tagPattern = 'OAI_SMF_TAG' - statusPrefix = 'cicd-oai-smf' - if imageInfoPrefix == 'oai_nrf': - containerName = 'oai-nrf' - tagPattern = 'OAI_NRF_TAG' - statusPrefix = 'cicd-oai-nrf' - if imageInfoPrefix == 'oai_spgwu': - containerName = 'oai-spgwu-tiny' - tagPattern = 'OAI_SPGWU_TAG' - statusPrefix = 'cicd-oai-upf' - if imageInfoPrefix == 'mysql': - containerName = imageInfoPrefix - tagPattern = 'N/A' - statusPrefix = 'cicd-mysql-svr' - if os.path.isfile(cwd + '/archives/' + imageInfoPrefix + '_image_info.log'): - usedTag = '' - createDate = '' - size = '' - with open(cwd + '/archives/' + imageInfoPrefix + '_image_info.log') as imageLog: - for line in imageLog: - line = line.strip() - result = re.search(tagPattern + ': (?P<tag>[a-zA-Z0-9\-\_:]+)', line) - if result is not None: - usedTag = result.group('tag') - result = re.search('Date = (?P<date>[a-zA-Z0-9\-\_:]+)', line) - if 
result is not None: - createDate = result.group('date') - result = re.search('Size = (?P<size>[0-9]+) bytes', line) - if result is not None: - sizeInt = int(result.group('size')) - if sizeInt < 1000000: - sizeInt = int(sizeInt / 1000) - size = str(sizeInt) + ' kB' - else: - sizeInt = int(sizeInt / 1000000) - size = str(sizeInt) + ' MB' - imageLog.close() - configState = 'KO' - if os.path.isfile(cwd + '/DS-TEST-RESULTS/mvc.yaml'): - with open(cwd + '/DS-TEST-RESULTS/mvc.yaml') as f: - data = yaml.full_load(f) - try: - if statusPrefix in data['nf-deployment']['pass']: - configState = 'OK' - elif statusPrefix in data['nf-deployment']['recovered']: - configState = 'RS' - except Exception as e: - pass - self.file.write(' <tr>\n') - self.file.write(' <td>' + containerName + '</td>\n') - self.file.write(' <td>' + usedTag + '</td>\n') - self.file.write(' <td>' + createDate + '</td>\n') - self.file.write(' <td>' + size + '</td>\n') - if configState == 'OK': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">' + configState + '</font></b></td>\n') - elif configState == 'RS': - self.file.write(' <td bgcolor = "Orange"><b><font color="white">' + configState + '</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "Red"><b><font color="white">' + configState + '</font></b></td>\n') - self.file.write(' </tr>\n') - else: - if imageInfoPrefix == 'mysql': - if os.path.isfile(cwd + '/DS-TEST-RESULTS/mvc.yaml'): - self.file.write(' <tr>\n') - self.file.write(' <td>' + containerName + '</td>\n') - self.file.write(' <td>mysql:8.0</td>\n') - self.file.write(' <td>N/A</td>\n') - self.file.write(' <td>449MB</td>\n') - configState = 'KO' - with open(cwd + '/DS-TEST-RESULTS/mvc.yaml') as f: - data = yaml.full_load(f) - try: - if statusPrefix in data['nf-deployment']['pass']: - configState = 'OK' - elif statusPrefix in data['nf-deployment']['recovered']: - configState = 'RS' - except Exception as e: - pass - if configState == 'OK': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">OK</font></b></td>\n') - elif configState == 'RS': - self.file.write(' <td bgcolor = "Orange"><b><font color="white">' + configState + '</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "Red"><b><font color="white">KO</font></b></td>\n') - self.file.write(' </tr>\n') - else: - self.file.write(' <tr>\n') - self.file.write(' <td>' + containerName + '</td>\n') - self.file.write(' <td>UNKNOWN</td>\n') - self.file.write(' <td>N/A</td>\n') - self.file.write(' <td>N/A</td>\n') - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">UNKNOW</font></b></td>\n') - self.file.write(' </tr>\n') - - def testSummaryHeader(self): - self.file.write(' <h2>DS Tester Summary</h2>\n') - cwd = os.getcwd() - finalStatusOK = False - if os.path.isfile(cwd + '/DS-TEST-RESULTS/mvc.yaml'): - cmd = f'egrep -c "final-result: pass" DS-TEST-RESULTS/mvc.yaml || true' - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - finalStatusOK = True - if finalStatusOK: - self.file.write(' <div class="alert alert-success">\n') - self.file.write(' <strong>Successful DsTester suite! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-danger">\n') - self.file.write(' <strong>Failed DsTester suite! 
<span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-warning">\n') - self.file.write(' <strong>LogFile not available! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - return finalStatusOK - - def testSummaryFooter(self): - self.file.write(' <br>\n') - - def testSummaryDetails(self): - self.file.write(' <br>\n') - self.file.write(' <button data-toggle="collapse" data-target="#ds-tester-details">More details on DsTester results</button>\n') - self.file.write(' <div id="ds-tester-details" class="collapse">\n') - self.file.write(' <table class="table-bordered" width = "60%" align = "center" border = 1>\n') - self.file.write(' <tr bgcolor = "#33CCFF" >\n') - self.file.write(' <th>Test Name</th>\n') - self.file.write(' <th>Test Status</th>\n') - self.file.write(' <th>Test Details</th>\n') - self.file.write(' </tr>\n') - cwd = os.getcwd() - if os.path.isfile(cwd + '/DS-TEST-RESULTS/mvc.yaml'): - with open(cwd + '/DS-TEST-RESULTS/mvc.yaml') as f: - data = yaml.full_load(f) - nScenarios = len(data['scenarios']) - for scenario in range(nScenarios): - self.file.write(' <tr>\n') - self.file.write(' <td>' + data['scenarios'][scenario]['name'] + '</td>\n') - if data['scenarios'][scenario]['result'] == 'fail': - self.file.write(' <td bgcolor = "Red"><b><font color="white">KO</font></b></td>\n') - elif data['scenarios'][scenario]['result'] == 'pass': - self.file.write(' <td bgcolor = "DarkGreen"><b><font color="white">OK</font></b></td>\n') - else: - self.file.write(' <td bgcolor = "DarkOrange"><b><font color="white">UNKNOW</font></b></td>\n') - testDetails = '' - try: - if data['scenarios'][scenario]['conditions']['om_conditions'] and data['scenarios'][scenario]['conditions']['pcap_test']: - testDetails += 'Conditions: \n' - for x,y in data['scenarios'][scenario]['conditions']['om_conditions'].items(): - testDetails += ' ' + str(x) + ': ' + str(y) + '\n' - testDetails += '\npcap_test: \n' - for x,y in data['scenarios'][scenario]['conditions']['pcap_test'].items(): - testDetails += ' ' + str(x) + ': \n' - testDetails += ' ' + str(y) + '\n' - except Exception as e: - for x,y in data['scenarios'][scenario]['conditions'].items(): - testDetails += str(x) + ': ' + str(y) + '\n' - #details += '\n' - self.file.write(' <td><pre>' + testDetails + '</pre></td>\n') - self.file.write(' </tr>\n') - else: - print ('no details???') - self.file.write(' </table>\n') - self.file.write(' </div>\n') - -#-------------------------------------------------------------------------------------------------------- -# -# Start of main -# -#-------------------------------------------------------------------------------------------------------- - -HTML = HtmlReport() -args = HTML._parse_args() - -HTML.job_name = args.job_name -HTML.job_id = args.job_id -HTML.job_url = args.job_url - -HTML.generate() diff --git a/ci-scripts/dsTesterDockerCompose/amf-healthy-check.sh b/ci-scripts/dsTesterDockerCompose/amf-healthy-check.sh deleted file mode 100755 index 628091a0a1ce5845d3f703a9e50f89912db66652..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTesterDockerCompose/amf-healthy-check.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -STATUS=0 - -RESULT=$(ps aux | grep -v nohup || true) -SUB='/openair-amf/bin/oai_amf -c /openair-amf/etc/amf.conf -o' -if [[ $RESULT =~ $SUB ]]; then - STATUS=0 -else - STATUS=-1 -fi - -exit $STATUS - - diff --git 
a/ci-scripts/dsTesterDockerCompose/ausf-healthy-check.sh b/ci-scripts/dsTesterDockerCompose/ausf-healthy-check.sh deleted file mode 100755 index d004b773f52b4bca2c4fb13588675052d762d365..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTesterDockerCompose/ausf-healthy-check.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -STATUS=0 - -RESULT=$(ps aux | grep -v nohup || true) -SUB='/openair-ausf/bin/oai_ausf -c /openair-ausf/etc/ausf.conf -o' -if [[ $RESULT =~ $SUB ]]; then - STATUS=0 -else - STATUS=-1 -fi - -exit $STATUS diff --git a/ci-scripts/dsTesterDockerCompose/docker-compose.tplt b/ci-scripts/dsTesterDockerCompose/docker-compose.tplt deleted file mode 100644 index caaa89f738061dccf41a9dea149522f55f2a17ae..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTesterDockerCompose/docker-compose.tplt +++ /dev/null @@ -1,368 +0,0 @@ -version: '3.8' -services: - cicd_mysql: - container_name: cicd-mysql-svr - image: mysql:8.0 - ports: - - 3306 - command: --init-file /docker-entrypoint-initdb.d/oai_db.sql - volumes: - - ./oai_db.sql:/docker-entrypoint-initdb.d/oai_db.sql:rw - environment: - - TZ=Europe/Paris - - MYSQL_DATABASE=oai_db - - MYSQL_USER=test - - MYSQL_PASSWORD=test - - MYSQL_ROOT_PASSWORD=linux - healthcheck: - test: "/usr/bin/mysql --user=root --password=linux -e 'show databases;'" - interval: 10s - timeout: 5s - retries: 5 - networks: - cicd_public_net: - ipv4_address: 192.168.61.194 - - cicd_oai_udr: - container_name: cicd-oai-udr - image: oai-udr:UDR_IMAGE_TAG - ports: - - 80 - command: > - bash -c "nohup tshark -i eth0 -w /tmp/udr.pcap 2>&1 > /dev/null & - /openair-udr/bin/oai_udr -c /openair-udr/etc/udr.conf -o" - cap_add: - - NET_ADMIN - environment: - - TZ=Europe/Paris - - INSTANCE=1 - - PID_DIRECTORY=/var/run - - UDR_INTERFACE_NAME_FOR_NUDR=eth0 - - UDR_INTERFACE_PORT_FOR_NUDR=80 - - UDR_INTERFACE_HTTP2_PORT_FOR_NUDR=8080 - - UDR_API_VERSION=v2 - - MYSQL_IPV4_ADDRESS=192.168.61.194 - - MYSQL_USER=test - - MYSQL_PASS=test - - MYSQL_DB=oai_db - - WAIT_MYSQL=120 - depends_on: - - cicd_mysql - networks: - cicd_public_net: - ipv4_address: 192.168.61.200 - volumes: - - ./udr-healthy-check.sh:/openair-udr/bin/udr-healthy-check.sh - healthcheck: - test: /bin/bash -c "/openair-udr/bin/udr-healthy-check.sh" - interval: 10s - timeout: 5s - retries: 5 - - cicd_oai_udm: - container_name: cicd-oai-udm - image: oai-udm:UDM_IMAGE_TAG - ports: - - 80 - command: > - bash -c "nohup tshark -i eth0 -w /tmp/udm.pcap 2>&1 > /dev/null & - /openair-udm/bin/oai_udm -c /openair-udm/etc/udm.conf -o" - cap_add: - - NET_ADMIN - environment: - - TZ=Europe/Paris - - INSTANCE=1 - - PID_DIRECTORY=/var/run - - UDM_NAME=OAI_UDM - - SBI_IF_NAME=eth0 - - SBI_PORT=80 - - UDM_VERSION_NB=v2 - - USE_FQDN_DNS=yes - - UDR_IP_ADDRESS=192.168.61.200 - - UDR_PORT=80 - - UDR_VERSION_NB=v2 - - UDR_FQDN=cicd_oai_udr - depends_on: - - cicd_mysql - - cicd_oai_udr - networks: - cicd_public_net: - ipv4_address: 192.168.61.201 - volumes: - - ./udm-healthy-check.sh:/openair-udm/bin/udm-healthy-check.sh - healthcheck: - test: /bin/bash -c "/openair-udm/bin/udm-healthy-check.sh" - interval: 10s - timeout: 5s - retries: 5 - - cicd_oai_ausf: - container_name: cicd-oai-ausf - image: oai-ausf:AUSF_IMAGE_TAG - ports: - - 80 - command: > - bash -c "nohup tshark -i eth0 -w /tmp/ausf.pcap 2>&1 > /dev/null & - /openair-ausf/bin/oai_ausf -c /openair-ausf/etc/ausf.conf -o" - cap_add: - - NET_ADMIN - environment: - - TZ=Europe/Paris - - INSTANCE_ID=1 - - PID_DIR=/var/run - - AUSF_NAME=OAI_AUSF - - SBI_IF_NAME=eth0 - - 
SBI_PORT=80 - - USE_FQDN_DNS=yes - - UDM_IP_ADDRESS=192.168.61.201 - - UDM_PORT=80 - - UDM_VERSION_NB=v2 - - UDM_FQDN=cicd_oai_udm - depends_on: - - cicd_mysql - - cicd_oai_udr - - cicd_oai_udm - networks: - cicd_public_net: - ipv4_address: 192.168.61.199 - volumes: - - ./ausf-healthy-check.sh:/openair-ausf/bin/ausf-healthy-check.sh - healthcheck: - test: /bin/bash -c "/openair-ausf/bin/ausf-healthy-check.sh" - interval: 10s - timeout: 5s - retries: 5 - - cicd_oai_nrf: - container_name: cicd-oai-nrf - image: oai-nrf:NRF_IMAGE_TAG - ports: - - 80 - - 9090 - command: > - bash -c "nohup tshark -i eth0 -w /tmp/nrf.pcap 2>&1 > /dev/null & - /openair-nrf/bin/oai_nrf -c /openair-nrf/etc/nrf.conf -o" - cap_add: - - NET_ADMIN - environment: - - TZ=Europe/Paris - - NRF_INTERFACE_NAME_FOR_SBI=eth0 - - NRF_INTERFACE_PORT_FOR_SBI=80 - - NRF_INTERFACE_HTTP2_PORT_FOR_SBI=9090 - - NRF_API_VERSION=v1 - - INSTANCE=0 - - PID_DIRECTORY=/var/run - networks: - cicd_public_net: - ipv4_address: 192.168.61.195 - volumes: - - ./nrf-healthy-check.sh:/openair-nrf/bin/nrf-healthy-check.sh - healthcheck: - test: /bin/bash -c "/openair-nrf/bin/nrf-healthy-check.sh" - interval: 10s - timeout: 5s - retries: 5 - - cicd_oai_amf: - container_name: cicd-oai-amf - image: oai-amf:AMF_IMAGE_TAG - ports: - - 38412 - - 80 - command: > - bash -c "nohup tshark -i eth0 -w /tmp/amf.pcap 2>&1 > /dev/null & - /openair-amf/bin/oai_amf -c /openair-amf/etc/amf.conf -o" - cap_add: - - NET_ADMIN - environment: - - INSTANCE=1 - - PID_DIRECTORY=/var/run - - MCC=208 - - MNC=95 - - REGION_ID=128 - - AMF_SET_ID=1 - - SERVED_GUAMI_MCC_0=208 - - SERVED_GUAMI_MNC_0=95 - - SERVED_GUAMI_REGION_ID_0=128 - - SERVED_GUAMI_AMF_SET_ID_0=1 - - SERVED_GUAMI_MCC_1=460 - - SERVED_GUAMI_MNC_1=11 - - SERVED_GUAMI_REGION_ID_1=10 - - SERVED_GUAMI_AMF_SET_ID_1=1 - - PLMN_SUPPORT_MCC=208 - - PLMN_SUPPORT_MNC=95 - - PLMN_SUPPORT_TAC=0xa000 - - SST_0=222 - - SD_0=123 - - SST_1=1 - - SD_1=12 - - AMF_INTERFACE_NAME_FOR_NGAP=eth0 - - AMF_INTERFACE_NAME_FOR_N11=eth0 - - SMF_INSTANCE_ID_0=1 - - SMF_FQDN_0=cicd_oai_smf - - SMF_IPV4_ADDR_0=192.168.61.197 - - SMF_HTTP_VERSION_0=v1 - - SELECTED_0=true - - SMF_INSTANCE_ID_1=2 - - SMF_FQDN_1=cicd_oai_smf - - SMF_IPV4_ADDR_1=192.168.61.197 - - SMF_HTTP_VERSION_1=v1 - - SELECTED_1=false - - MYSQL_SERVER=cicd_mysql - - MYSQL_USER=root - - MYSQL_PASS=linux - - MYSQL_DB=oai_db - - OPERATOR_KEY=63bfa50ee6523365ff14c1f45f88737d - - NRF_IPV4_ADDRESS=192.168.61.195 - - NRF_PORT=80 - - NF_REGISTRATION=yes - - SMF_SELECTION=yes - - USE_FQDN_DNS=yes - - EXTERNAL_AUSF=yes - - NRF_API_VERSION=v1 - - NRF_FQDN=cicd_oai_nrf - - AUSF_IPV4_ADDRESS=192.168.61.199 - - AUSF_PORT=80 - - AUSF_API_VERSION=v1 - - AUSF_FQDN=cicd_oai_ausf - depends_on: - - cicd_mysql - - cicd_oai_nrf - - cicd_oai_udr - - cicd_oai_udm - - cicd_oai_ausf - networks: - cicd_public_net: - ipv4_address: 192.168.61.196 - volumes: - - ./amf-healthy-check.sh:/openair-amf/bin/amf-healthy-check.sh - healthcheck: - test: /bin/bash -c "/openair-amf/bin/amf-healthy-check.sh" - interval: 10s - timeout: 5s - retries: 5 - - cicd_oai_smf: - container_name: cicd-oai-smf - image: oai-smf:SMF_IMAGE_TAG - ports: - - 80 - - 9090 - command: > - bash -c "nohup tshark -i eth0 -w /tmp/smf.pcap 2>&1 > /dev/null & - /openair-smf/bin/oai_smf -c /openair-smf/etc/smf.conf -o" - cap_add: - - NET_ADMIN - environment: - - TZ=Europe/Paris - - INSTANCE=1 - - PID_DIRECTORY=/var/run - - SMF_INTERFACE_NAME_FOR_N4=eth0 - - SMF_INTERFACE_NAME_FOR_SBI=eth0 - - SMF_INTERFACE_PORT_FOR_SBI=80 - - 
SMF_INTERFACE_HTTP2_PORT_FOR_SBI=9090 - - SMF_API_VERSION=v1 - - DEFAULT_DNS_IPV4_ADDRESS=192.168.18.129 - - DEFAULT_DNS_SEC_IPV4_ADDRESS=4.4.4.4 - - AMF_IPV4_ADDRESS=192.168.61.196 - - AMF_PORT=80 - - AMF_API_VERSION=v1 - - AMF_FQDN=cicd_oai_amf - - UDM_IPV4_ADDRESS=127.0.0.1 - - UDM_PORT=80 - - UDM_API_VERSION=v2 - - UDM_FQDN=localhost - - UPF_IPV4_ADDRESS=192.168.61.198 - - UPF_FQDN_0=cicd_oai_upf - - NRF_IPV4_ADDRESS=192.168.61.195 - - NRF_PORT=80 - - NRF_API_VERSION=v1 - - NRF_FQDN=cicd_oai_nrf - - REGISTER_NRF=yes - - DISCOVER_UPF=yes - - USE_FQDN_DNS=yes - depends_on: - - cicd_oai_nrf - - cicd_oai_amf - networks: - cicd_public_net: - ipv4_address: 192.168.61.197 - volumes: - - ./smf-healthy-check.sh:/oai-cn5g-smf/bin/smf-healthy-check.sh - healthcheck: - test: /bin/bash -c "/oai-cn5g-smf/bin/smf-healthy-check.sh" - interval: 10s - timeout: 5s - retries: 5 - - cicd_oai_upf: - container_name: cicd-oai-upf - image: oai-spgwu-tiny:SPGWU_IMAGE_TAG - ports: - - 2152 - - 8805 - command: > - bash -c "nohup tshark -i eth0 -w /tmp/spgwu.pcap 2>&1 > /dev/null & - /openair-spgwu-tiny/bin/oai_spgwu -c /openair-spgwu-tiny/etc/spgw_u.conf -o" - environment: - - TZ=Europe/Paris - - PID_DIRECTORY=/var/run - - SGW_INTERFACE_NAME_FOR_S1U_S12_S4_UP=eth0 - - SGW_INTERFACE_NAME_FOR_SX=eth0 - - PGW_INTERFACE_NAME_FOR_SGI=eth0 - - NETWORK_UE_NAT_OPTION=yes - - NETWORK_UE_IP=12.0.0.0/24 - #- THREAD_S1U_PRIO=9090 - #- S1U_THREADS=16 - #- THREAD_SX_PRIO=64 - #- SX_THREADS=1 - #- THREAD_SGI_PRIO=64 - #- SGI_THREADS=16 - - SPGWC0_IP_ADDRESS=192.168.61.197 - - BYPASS_UL_PFCP_RULES=no - - MCC=208 - - MNC=95 - - MNC03=095 - - TAC=40960 - - GW_ID=1 - - REALM=openairinterface.org - - ENABLE_5G_FEATURES=yes - - REGISTER_NRF=yes - - USE_FQDN_NRF=yes - - UPF_FQDN_5G=cicd_oai_upf - - NRF_IPV4_ADDRESS=192.168.61.195 - - NRF_PORT=80 - - NRF_API_VERSION=v1 - - NRF_FQDN=cicd_oai_nrf - - NSSAI_SST_0=222 - - NSSAI_SD_0=123 - - DNN_0=default - depends_on: - - cicd_oai_nrf - - cicd_oai_smf - cap_add: - - NET_ADMIN - - SYS_ADMIN - cap_drop: - - ALL - privileged: true - networks: - cicd_public_net: - ipv4_address: 192.168.61.198 - volumes: - - ./upf-healthy-check.sh:/openair-spgwu-tiny/bin/upf-healthy-check.sh - healthcheck: - test: /bin/bash -c "/openair-spgwu-tiny/bin/upf-healthy-check.sh" - interval: 10s - timeout: 5s - retries: 5 - -networks: - cicd_public_net: - name: cicd-oai-public-net - driver: bridge - ipam: - config: - - subnet: 192.168.61.192/26 - driver_opts: - com.docker.network.bridge.name: "cicd-public" diff --git a/ci-scripts/dsTesterDockerCompose/nrf-healthy-check.sh b/ci-scripts/dsTesterDockerCompose/nrf-healthy-check.sh deleted file mode 100755 index 66ba809316bffe544f7003b908802406b5c69f15..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTesterDockerCompose/nrf-healthy-check.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -STATUS=0 - -RESULT=$(ps aux | grep -v nohup || true) -SUB='/openair-nrf/bin/oai_nrf -c /openair-nrf/etc/nrf.conf -o' -if [[ $RESULT =~ $SUB ]]; then - STATUS=0 -else - STATUS=-1 -fi - -exit $STATUS - - diff --git a/ci-scripts/dsTesterDockerCompose/oai_db.sql b/ci-scripts/dsTesterDockerCompose/oai_db.sql deleted file mode 100644 index 836cb62c2d8293c783c1bceea6ecc2b45cf8c482..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTesterDockerCompose/oai_db.sql +++ /dev/null @@ -1,309 +0,0 @@ --- phpMyAdmin SQL Dump --- version 5.1.0 --- https://www.phpmyadmin.net/ --- --- Host: 172.16.200.10:3306 --- Generation Time: Mar 22, 2021 at 10:31 AM --- Server version: 5.7.33 --- 
PHP Version: 7.4.15 - -SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; -START TRANSACTION; -SET time_zone = "+00:00"; - - -/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; -/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; -/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; -/*!40101 SET NAMES utf8mb4 */; - --- --- Database: `oai_db` --- - --- -------------------------------------------------------- - --- --- Table structure for table `AccessAndMobilitySubscriptionData` --- - -CREATE TABLE `AccessAndMobilitySubscriptionData` ( - `ueid` varchar(15) NOT NULL, - `servingPlmnid` varchar(15) NOT NULL, - `supportedFeatures` varchar(50) DEFAULT NULL, - `gpsis` json DEFAULT NULL, - `internalGroupIds` json DEFAULT NULL, - `sharedVnGroupDataIds` json DEFAULT NULL, - `subscribedUeAmbr` json DEFAULT NULL, - `nssai` json DEFAULT NULL, - `ratRestrictions` json DEFAULT NULL, - `forbiddenAreas` json DEFAULT NULL, - `serviceAreaRestriction` json DEFAULT NULL, - `coreNetworkTypeRestrictions` json DEFAULT NULL, - `rfspIndex` int(10) DEFAULT NULL, - `subsRegTimer` int(10) DEFAULT NULL, - `ueUsageType` int(10) DEFAULT NULL, - `mpsPriority` tinyint(1) DEFAULT NULL, - `mcsPriority` tinyint(1) DEFAULT NULL, - `activeTime` int(10) DEFAULT NULL, - `sorInfo` json DEFAULT NULL, - `sorInfoExpectInd` tinyint(1) DEFAULT NULL, - `sorafRetrieval` tinyint(1) DEFAULT NULL, - `sorUpdateIndicatorList` json DEFAULT NULL, - `upuInfo` json DEFAULT NULL, - `micoAllowed` tinyint(1) DEFAULT NULL, - `sharedAmDataIds` json DEFAULT NULL, - `odbPacketServices` json DEFAULT NULL, - `serviceGapTime` int(10) DEFAULT NULL, - `mdtUserConsent` json DEFAULT NULL, - `mdtConfiguration` json DEFAULT NULL, - `traceData` json DEFAULT NULL, - `cagData` json DEFAULT NULL, - `stnSr` varchar(50) DEFAULT NULL, - `cMsisdn` varchar(50) DEFAULT NULL, - `nbIoTUePriority` int(10) DEFAULT NULL, - `nssaiInclusionAllowed` tinyint(1) DEFAULT NULL, - `rgWirelineCharacteristics` varchar(50) DEFAULT NULL, - `ecRestrictionDataWb` json DEFAULT NULL, - `ecRestrictionDataNb` tinyint(1) DEFAULT NULL, - `expectedUeBehaviourList` json DEFAULT NULL, - `primaryRatRestrictions` json DEFAULT NULL, - `secondaryRatRestrictions` json DEFAULT NULL, - `edrxParametersList` json DEFAULT NULL, - `ptwParametersList` json DEFAULT NULL, - `iabOperationAllowed` tinyint(1) DEFAULT NULL, - `wirelineForbiddenAreas` json DEFAULT NULL, - `wirelineServiceAreaRestriction` json DEFAULT NULL -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - --- -------------------------------------------------------- - --- --- Table structure for table `Amf3GppAccessRegistration` --- - -CREATE TABLE `Amf3GppAccessRegistration` ( - `ueid` varchar(15) NOT NULL, - `amfInstanceId` varchar(50) NOT NULL, - `supportedFeatures` varchar(50) DEFAULT NULL, - `purgeFlag` tinyint(1) DEFAULT NULL, - `pei` varchar(50) DEFAULT NULL, - `imsVoPs` json DEFAULT NULL, - `deregCallbackUri` varchar(50) NOT NULL, - `amfServiceNameDereg` json DEFAULT NULL, - `pcscfRestorationCallbackUri` varchar(50) DEFAULT NULL, - `amfServiceNamePcscfRest` json DEFAULT NULL, - `initialRegistrationInd` tinyint(1) DEFAULT NULL, - `guami` json NOT NULL, - `backupAmfInfo` json DEFAULT NULL, - `drFlag` tinyint(1) DEFAULT NULL, - `ratType` json NOT NULL, - `urrpIndicator` tinyint(1) DEFAULT NULL, - `amfEeSubscriptionId` varchar(50) DEFAULT NULL, - `epsInterworkingInfo` json DEFAULT NULL, - `ueSrvccCapability` tinyint(1) DEFAULT NULL, - `registrationTime` varchar(50) DEFAULT NULL, - `vgmlcAddress` json DEFAULT NULL, - 
`contextInfo` json DEFAULT NULL, - `noEeSubscriptionInd` tinyint(1) DEFAULT NULL -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - --- -------------------------------------------------------- - --- --- Table structure for table `AuthenticationStatus` --- - -CREATE TABLE `AuthenticationStatus` ( - `ueid` varchar(20) NOT NULL, - `nfInstanceId` varchar(50) NOT NULL, - `success` tinyint(1) NOT NULL, - `timeStamp` varchar(50) NOT NULL, - `authType` varchar(25) NOT NULL, - `servingNetworkName` varchar(50) NOT NULL, - `authRemovalInd` tinyint(1) DEFAULT NULL -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - --- -------------------------------------------------------- - --- --- Table structure for table `AuthenticationSubscription` --- - -CREATE TABLE `AuthenticationSubscription` ( - `ueid` varchar(20) NOT NULL, - `authenticationMethod` varchar(25) NOT NULL, - `encPermanentKey` varchar(50) DEFAULT NULL, - `protectionParameterId` varchar(50) DEFAULT NULL, - `sequenceNumber` json DEFAULT NULL, - `authenticationManagementField` varchar(50) DEFAULT NULL, - `algorithmId` varchar(50) DEFAULT NULL, - `encOpcKey` varchar(50) DEFAULT NULL, - `encTopcKey` varchar(50) DEFAULT NULL, - `vectorGenerationInHss` tinyint(1) DEFAULT NULL, - `n5gcAuthMethod` varchar(15) DEFAULT NULL, - `rgAuthenticationInd` tinyint(1) DEFAULT NULL, - `supi` varchar(20) DEFAULT NULL -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - --- --- Dumping data for table `AuthenticationSubscription` --- - -INSERT INTO `AuthenticationSubscription` (`ueid`, `authenticationMethod`, `encPermanentKey`, `protectionParameterId`, `sequenceNumber`, `authenticationManagementField`, `algorithmId`, `encOpcKey`, `encTopcKey`, `vectorGenerationInHss`, `n5gcAuthMethod`, `rgAuthenticationInd`, `supi`) VALUES -('208950000000031', '5G_AKA', '0C0A34601D4F07677303652C0462535B', '0C0A34601D4F07677303652C0462535B', '{\"sqn\": \"000000000020\", \"sqnScheme\": \"NON_TIME_BASED\", \"lastIndexes\": {\"ausf\": 0}}', '8000', 'milenage', '63bfa50ee6523365ff14c1f45f88737d', NULL, NULL, NULL, NULL, '208950000000031'); - --- -------------------------------------------------------- - --- --- Table structure for table `SdmSubscriptions` --- - -CREATE TABLE `SdmSubscriptions` ( - `ueid` varchar(15) NOT NULL, - `subsId` int(10) UNSIGNED NOT NULL, - `nfInstanceId` varchar(50) NOT NULL, - `implicitUnsubscribe` tinyint(1) DEFAULT NULL, - `expires` varchar(50) DEFAULT NULL, - `callbackReference` varchar(50) NOT NULL, - `amfServiceName` json DEFAULT NULL, - `monitoredResourceUris` json NOT NULL, - `singleNssai` json DEFAULT NULL, - `dnn` varchar(50) DEFAULT NULL, - `subscriptionId` varchar(50) DEFAULT NULL, - `plmnId` json DEFAULT NULL, - `immediateReport` tinyint(1) DEFAULT NULL, - `report` json DEFAULT NULL, - `supportedFeatures` varchar(50) DEFAULT NULL, - `contextInfo` json DEFAULT NULL -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - --- -------------------------------------------------------- - --- --- Table structure for table `SessionManagementSubscriptionData` --- - -CREATE TABLE `SessionManagementSubscriptionData` ( - `ueid` varchar(15) NOT NULL, - `servingPlmnid` varchar(15) NOT NULL, - `singleNssai` json NOT NULL, - `dnnConfigurations` json DEFAULT NULL, - `internalGroupIds` json DEFAULT NULL, - `sharedVnGroupDataIds` json DEFAULT NULL, - `sharedDnnConfigurationsId` varchar(50) DEFAULT NULL, - `odbPacketServices` json DEFAULT NULL, - `traceData` json DEFAULT NULL, - `sharedTraceDataId` varchar(50) DEFAULT NULL, - `expectedUeBehavioursList` json DEFAULT NULL, - `suggestedPacketNumDlList` json DEFAULT NULL, 
- `3gppChargingCharacteristics` varchar(50) DEFAULT NULL -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - --- -------------------------------------------------------- - --- --- Table structure for table `SmfRegistrations` --- - -CREATE TABLE `SmfRegistrations` ( - `ueid` varchar(15) NOT NULL, - `subpduSessionId` int(10) NOT NULL, - `smfInstanceId` varchar(50) NOT NULL, - `smfSetId` varchar(50) DEFAULT NULL, - `supportedFeatures` varchar(50) DEFAULT NULL, - `pduSessionId` int(10) NOT NULL, - `singleNssai` json NOT NULL, - `dnn` varchar(50) DEFAULT NULL, - `emergencyServices` tinyint(1) DEFAULT NULL, - `pcscfRestorationCallbackUri` varchar(50) DEFAULT NULL, - `plmnId` json NOT NULL, - `pgwFqdn` varchar(50) DEFAULT NULL, - `epdgInd` tinyint(1) DEFAULT NULL, - `deregCallbackUri` varchar(50) DEFAULT NULL, - `registrationReason` json DEFAULT NULL, - `registrationTime` varchar(50) DEFAULT NULL, - `contextInfo` json DEFAULT NULL -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - --- -------------------------------------------------------- - --- --- Table structure for table `SmfSelectionSubscriptionData` --- - -CREATE TABLE `SmfSelectionSubscriptionData` ( - `ueid` varchar(15) NOT NULL, - `servingPlmnid` varchar(15) NOT NULL, - `supportedFeatures` varchar(50) DEFAULT NULL, - `subscribedSnssaiInfos` json DEFAULT NULL, - `sharedSnssaiInfosId` varchar(50) DEFAULT NULL -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - --- --- Indexes for dumped tables --- - --- --- Indexes for table `AccessAndMobilitySubscriptionData` --- -ALTER TABLE `AccessAndMobilitySubscriptionData` - ADD PRIMARY KEY (`ueid`,`servingPlmnid`) USING BTREE; - --- --- Indexes for table `Amf3GppAccessRegistration` --- -ALTER TABLE `Amf3GppAccessRegistration` - ADD PRIMARY KEY (`ueid`); - --- --- Indexes for table `AuthenticationStatus` --- -ALTER TABLE `AuthenticationStatus` - ADD PRIMARY KEY (`ueid`); - --- --- Indexes for table `AuthenticationSubscription` --- -ALTER TABLE `AuthenticationSubscription` - ADD PRIMARY KEY (`ueid`); - --- --- Indexes for table `SdmSubscriptions` --- -ALTER TABLE `SdmSubscriptions` - ADD PRIMARY KEY (`subsId`,`ueid`) USING BTREE; - --- --- Indexes for table `SessionManagementSubscriptionData` --- -ALTER TABLE `SessionManagementSubscriptionData` - ADD PRIMARY KEY (`ueid`,`servingPlmnid`) USING BTREE; - --- --- Indexes for table `SmfRegistrations` --- -ALTER TABLE `SmfRegistrations` - ADD PRIMARY KEY (`ueid`,`subpduSessionId`) USING BTREE; - --- --- Indexes for table `SmfSelectionSubscriptionData` --- -ALTER TABLE `SmfSelectionSubscriptionData` - ADD PRIMARY KEY (`ueid`,`servingPlmnid`) USING BTREE; - --- --- AUTO_INCREMENT for dumped tables --- - --- --- AUTO_INCREMENT for table `SdmSubscriptions` --- -ALTER TABLE `SdmSubscriptions` - MODIFY `subsId` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3; -COMMIT; - -/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; -/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; -/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; - diff --git a/ci-scripts/dsTesterDockerCompose/smf-healthy-check.sh b/ci-scripts/dsTesterDockerCompose/smf-healthy-check.sh deleted file mode 100755 index 924d1d950dc2c565e68094046b9a83d4d26a4f13..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTesterDockerCompose/smf-healthy-check.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -STATUS=0 - -RESULT=$(ps aux | grep -v nohup || true) -SUB='/openair-smf/bin/oai_smf -c /openair-smf/etc/smf.conf -o' -if [[ $RESULT =~ $SUB ]]; then - STATUS=0 -else - STATUS=-1 -fi - 
-exit $STATUS diff --git a/ci-scripts/dsTesterDockerCompose/udm-healthy-check.sh b/ci-scripts/dsTesterDockerCompose/udm-healthy-check.sh deleted file mode 100755 index 4a3ac112bc566888724d3a12a2149224cec0ccff..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTesterDockerCompose/udm-healthy-check.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -STATUS=0 - -RESULT=$(ps aux | grep -v nohup || true) -SUB='/openair-udm/bin/oai_udm -c /openair-udm/etc/udm.conf -o' -if [[ $RESULT =~ $SUB ]]; then - STATUS=0 -else - STATUS=-1 -fi - -exit $STATUS diff --git a/ci-scripts/dsTesterDockerCompose/udr-healthy-check.sh b/ci-scripts/dsTesterDockerCompose/udr-healthy-check.sh deleted file mode 100755 index ef68c8eb8109dbb6b0396cdd03d340a846568f6a..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTesterDockerCompose/udr-healthy-check.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -STATUS=0 - -RESULT=$(ps aux | grep -v nohup || true) -SUB='/openair-udr/bin/oai_udr -c /openair-udr/etc/udr.conf -o' -if [[ $RESULT =~ $SUB ]]; then - STATUS=0 -else - STATUS=-1 -fi - -exit $STATUS diff --git a/ci-scripts/dsTesterDockerCompose/upf-healthy-check.sh b/ci-scripts/dsTesterDockerCompose/upf-healthy-check.sh deleted file mode 100755 index 69a8fa4759cb69e3a703a2685c958b734b78c31f..0000000000000000000000000000000000000000 --- a/ci-scripts/dsTesterDockerCompose/upf-healthy-check.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -STATUS=0 - -RESULT=$(ps aux | grep -v nohup || true) -SUB='/openair-spgwu-tiny/bin/oai_spgwu -c /openair-spgwu-tiny/etc/spgw_u.conf -o' -if [[ $RESULT =~ $SUB ]]; then - STATUS=0 -else - STATUS=-1 -fi - -exit $STATUS diff --git a/ci-scripts/generateHtmlReportDeployment.py b/ci-scripts/generateHtmlReportDeployment.py deleted file mode 100644 index a9832271845804aae8cfb3c810cc804d2e52a7ce..0000000000000000000000000000000000000000 --- a/ci-scripts/generateHtmlReportDeployment.py +++ /dev/null @@ -1,204 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. -# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. 
-# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- - -import os -import re -import sys -import subprocess - -class HtmlReport(): - def __init__(self): - self.job_name = '' - self.job_id = '' - self.job_url = '' - self.job_start_time = 'TEMPLATE_TIME' - self.amfConfigStatus = True - self.smfConfigStatus = True - self.upfConfigStatus = True - - def generate(self): - cwd = os.getcwd() - self.file = open(cwd + '/deploy_results_oai_cn5g.html', 'w') - self.generateHeader() - - finalStatus = self.deploySummaryHeader() - self.deploySummaryDetails() - - self.generateFooter() - self.file.close() - - finalStatus = True - if finalStatus: - sys.exit(0) - else: - sys.exit(-1) - - def generateHeader(self): - # HTML Header - self.file.write('<!DOCTYPE html>\n') - self.file.write('<html class="no-js" lang="en-US">\n') - self.file.write('<head>\n') - self.file.write(' <meta name="viewport" content="width=device-width, initial-scale=1">\n') - self.file.write(' <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css">\n') - self.file.write(' <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>\n') - self.file.write(' <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>\n') - self.file.write(' <title>OAI Core Network Test Results for ' + self.job_name + ' job build #' + self.job_id + '</title>\n') - self.file.write('</head>\n') - self.file.write('<body><div class="container">\n') - self.file.write(' <table width = "100%" style="border-collapse: collapse; border: none;">\n') - self.file.write(' <tr style="border-collapse: collapse; border: none;">\n') - self.file.write(' <td style="border-collapse: collapse; border: none;">\n') - self.file.write(' <a href="http://www.openairinterface.org/">\n') - self.file.write(' <img src="http://www.openairinterface.org/wp-content/uploads/2016/03/cropped-oai_final_logo2.png" alt="" border="none" height=50 width=150>\n') - self.file.write(' </img>\n') - self.file.write(' </a>\n') - self.file.write(' </td>\n') - self.file.write(' <td style="border-collapse: collapse; border: none; vertical-align: center;">\n') - self.file.write(' <b><font size = "6">Job Summary -- Job: ' + self.job_name + ' -- Build-ID: <a href="' + self.job_url + '">' + self.job_id + '</a></font></b>\n') - self.file.write(' </td>\n') - self.file.write(' </tr>\n') - self.file.write(' </table>\n') - self.file.write(' <br>\n') - - def generateFooter(self): - self.file.write(' <div class="well well-lg">End of Test Report -- Copyright <span class="glyphicon glyphicon-copyright-mark"></span> 2020 <a href="http://www.openairinterface.org/">OpenAirInterface</a>. 
All Rights Reserved.</div>\n') - self.file.write('</div></body>\n') - self.file.write('</html>\n') - - def deploySummaryHeader(self): - self.file.write(' <h2>Deployment Summary</h2>\n') - - finalStatusOK = True - cwd = os.getcwd() - # Checking AMF config log - if os.path.isfile(cwd + '/archives/amf_config.log'): - amfStatus = False - with open(cwd + '/archives/amf_config.log','r') as amfLog: - for line in amfLog: - result = re.search('AMF Configuration Successful', line) - if result is not None: - amfStatus = True - amfLog.close() - if not amfStatus: - finalStatusOK = False - else: - amfStatus = False - finalStatusOK = False - self.amfConfigStatus = amfStatus - - if os.path.isfile(cwd + '/archives/smf_config.log'): - smfStatus = False - with open(cwd + '/archives/smf_config.log','r') as smfLog: - for line in smfLog: - result = re.search('SMF Configuration Successful', line) - if result is not None: - smfStatus = True - smfLog.close() - if not smfStatus: - finalStatusOK = False - else: - smfStatus = False - finalStatusOK = False - self.smfConfigStatus = smfStatus - - self.upfConfigStatus = True - - if finalStatusOK: - self.file.write(' <div class="alert alert-success">\n') - self.file.write(' <strong>Successful Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - else: - self.file.write(' <div class="alert alert-danger">\n') - self.file.write(' <strong>Failed Deployment! <span class="glyphicon glyphicon-warning-sign"></span></strong>\n') - self.file.write(' </div>\n') - - return finalStatusOK - - def deploySummaryDetails(self): - self.file.write(' <br>\n') - self.file.write(' <button data-toggle="collapse" data-target="#deploy-details">More details on deployment results</button>\n') - self.file.write(' <br>\n') - self.file.write(' <div id="deploy-details" class="collapse">\n') - self.file.write(' <table class="table-bordered" width = "60%" align = "center" border = 1>\n') - self.file.write(' <tr bgcolor = "#33CCFF" >\n') - if self.amfConfigStatus: - self.file.write(' <th>AMF Deployment OK</th>\n') - else: - self.file.write(' <th>AMF Deployment KO</th>\n') - if self.smfConfigStatus: - self.file.write(' <th>SMF Deployment OK</th>\n') - else: - self.file.write(' <th>SMF Deployment KO</th>\n') - if self.upfConfigStatus: - self.file.write(' <th>UPF Deployment OK</th>\n') - else: - self.file.write(' <th>UPF Deployment KO</th>\n') - self.file.write(' </tr>\n') - self.file.write(' </table>\n') - self.file.write(' </div>\n') - self.file.write(' <br>\n') - - -def Usage(): - print('----------------------------------------------------------------------------------------------------------------------') - print('generateHtmlReportDeployment.py') - print(' Generate an HTML report for the Jenkins pipeline on oai-cn5g-fed.') - print('----------------------------------------------------------------------------------------------------------------------') - print('Usage: python3 generateHtmlReportDeployment.py [options]') - print(' --help Show this help.') - print('---------------------------------------------------------------------------------------------- Mandatory Options -----') - print(' --job_name=[Jenkins Job name]') - print(' --job_id=[Jenkins Job Build ID]') - print(' --job_url=[Jenkins Job Build URL]') - -#-------------------------------------------------------------------------------------------------------- -# -# Start of main -# -#-------------------------------------------------------------------------------------------------------- - 
-argvs = sys.argv -argc = len(argvs) - -HTML = HtmlReport() - -while len(argvs) > 1: - myArgv = argvs.pop(1) - if re.match('^\-\-help$', myArgv, re.IGNORECASE): - Usage() - sys.exit(0) - elif re.match('^\-\-job_name=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-job_name=(.+)$', myArgv, re.IGNORECASE) - HTML.job_name = matchReg.group(1) - elif re.match('^\-\-job_id=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-job_id=(.+)$', myArgv, re.IGNORECASE) - HTML.job_id = matchReg.group(1) - elif re.match('^\-\-job_url=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-job_url=(.+)$', myArgv, re.IGNORECASE) - HTML.job_url = matchReg.group(1) - else: - sys.exit('Invalid Parameter: ' + myArgv) - -if HTML.job_name == '' or HTML.job_id == '' or HTML.job_url == '': - sys.exit('Missing Parameter in job description') - -HTML.generate() diff --git a/ci-scripts/generate_html.py b/ci-scripts/generate_html.py deleted file mode 100644 index c83654de4f4d8d3449e59123fbffda5cfbe81af9..0000000000000000000000000000000000000000 --- a/ci-scripts/generate_html.py +++ /dev/null @@ -1,198 +0,0 @@ -""" -Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -contributor license agreements. See the NOTICE file distributed with -this work for additional information regarding copyright ownership. -The OpenAirInterface Software Alliance licenses this file to You under -the OAI Public License, Version 1.1 (the "License"); you may not use this file -except in compliance with the License. -You may obtain a copy of the License at - - http://www.openairinterface.org/?page_id=698 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
------------------------------------------------------------------------------- -For more information about the OpenAirInterface (OAI) Software Alliance: - contact@openairinterface.org ---------------------------------------------------------------------- -""" - -HEADER_TEMPLATE = 'ci-scripts/html-templates/file-header.htm' -FOOTER_TEMPLATE = 'ci-scripts/html-templates/file-footer.htm' -CHAPT_HEADER_TEMPLATE = 'ci-scripts/html-templates/chapter-header.htm' -BUTTON_HEADER_TEMPLATE = 'ci-scripts/html-templates/button-header.htm' -BUTTON_FOOTER_TEMPLATE = 'ci-scripts/html-templates/button-footer.htm' -IMAGE_TABLE_HEADER_TEMPLATE = 'ci-scripts/html-templates/image-table-header.htm' -IMAGE_TABLE_FOOTER_TEMPLATE = 'ci-scripts/html-templates/image-table-footer.htm' -IMAGE_TABLE_ROW_TEMPLATE = 'ci-scripts/html-templates/image-table-row.htm' -IMAGE_TABLE_SEP_TEMPLATE = 'ci-scripts/html-templates/image-table-sep.htm' -COMMAND_TABLE_HEADER_TEMPLATE = 'ci-scripts/html-templates/command-table-header.htm' -COMMAND_TABLE_FOOTER_TEMPLATE = 'ci-scripts/html-templates/command-table-footer.htm' -COMMAND_TABLE_ROW_TEMPLATE = 'ci-scripts/html-templates/command-table-row.htm' -LIST_HEADER_TEMPLATE = 'ci-scripts/html-templates/list-header.htm' -LIST_FOOTER_TEMPLATE = 'ci-scripts/html-templates/list-footer.htm' -LIST_ROW_TEMPLATE = 'ci-scripts/html-templates/list-row.htm' -LIST_SUB_HEADER_TEMPLATE = 'ci-scripts/html-templates/list-sub-header.htm' -LIST_SUB_FOOTER_TEMPLATE = 'ci-scripts/html-templates/list-sub-footer.htm' -LIST_SUB_ROW_TEMPLATE = 'ci-scripts/html-templates/list-sub-row.htm' - -import os -import re - -def generate_header(args): - cwd = os.getcwd() - header = '' - with open(os.path.join(cwd, HEADER_TEMPLATE), 'r') as temp: - header = temp.read() - header = re.sub('JOB_NAME', args.job_name, header) - header = re.sub('BUILD_ID', args.job_id, header) - header = re.sub('BUILD_URL', args.job_url, header) - return header - -def generate_footer(): - cwd = os.getcwd() - footer = '' - with open(os.path.join(cwd, FOOTER_TEMPLATE), 'r') as temp: - footer = temp.read() - return footer - -def generate_chapter(name, message, status): - cwd = os.getcwd() - header = '' - with open(os.path.join(cwd, CHAPT_HEADER_TEMPLATE), 'r') as temp: - header = temp.read() - header = re.sub('CHAPTER_NAME', name, header) - if status: - header = re.sub('ALERT_LEVEL', 'success', header) - else: - header = re.sub('ALERT_LEVEL', 'danger', header) - header = re.sub('MESSAGE', message, header) - return header - -def generate_button_header(name, message): - cwd = os.getcwd() - header = '' - with open(os.path.join(cwd, BUTTON_HEADER_TEMPLATE), 'r') as temp: - header = temp.read() - header = re.sub('BUTTON_NAME', name, header) - header = re.sub('BUTTON_MESSAGE', message, header) - return header - -def generate_button_footer(): - cwd = os.getcwd() - footer = '' - with open(os.path.join(cwd, BUTTON_FOOTER_TEMPLATE), 'r') as temp: - footer = temp.read() - return footer - -def generate_image_table_header(): - cwd = os.getcwd() - header = '' - with open(os.path.join(cwd, IMAGE_TABLE_HEADER_TEMPLATE), 'r') as temp: - header = temp.read() - return header - -def generate_image_table_footer(): - cwd = os.getcwd() - footer = '' - with open(os.path.join(cwd, IMAGE_TABLE_FOOTER_TEMPLATE), 'r') as temp: - footer = temp.read() - return footer - -def generate_image_table_row(name, tag, ocTag, creationDate, size): - cwd = os.getcwd() - row = '' - with open(os.path.join(cwd, IMAGE_TABLE_ROW_TEMPLATE), 'r') as temp: - row = temp.read() - row = 
re.sub('CONTAINER_NAME', name, row) - row = re.sub('IMAGE_TAG', tag, row) - row = re.sub('OC_TAG', ocTag, row) - row = re.sub('CREATION_DATE', creationDate, row) - row = re.sub('IMAGE_SIZE', size, row) - return row - -def generate_image_table_separator(): - cwd = os.getcwd() - row = '' - with open(os.path.join(cwd, IMAGE_TABLE_SEP_TEMPLATE), 'r') as temp: - row = temp.read() - return row - -def generate_command_table_header(): - cwd = os.getcwd() - header = '' - with open(os.path.join(cwd, COMMAND_TABLE_HEADER_TEMPLATE), 'r') as temp: - header = temp.read() - return header - -def generate_command_table_footer(): - cwd = os.getcwd() - footer = '' - with open(os.path.join(cwd, COMMAND_TABLE_FOOTER_TEMPLATE), 'r') as temp: - footer = temp.read() - return footer - -def generate_command_table_row(command, status): - cwd = os.getcwd() - row = '' - with open(os.path.join(cwd, COMMAND_TABLE_ROW_TEMPLATE), 'r') as temp: - row = temp.read() - row = re.sub('COMMAND', command, row) - if status: - row = re.sub('STATUS', 'PASS', row) - row = re.sub('COLOR', 'lightgreen', row) - else: - row = re.sub('STATUS', 'FAIL', row) - row = re.sub('COLOR', 'lightcoral', row) - return row - -def generate_list_header(): - cwd = os.getcwd() - header = '' - with open(os.path.join(cwd, LIST_HEADER_TEMPLATE), 'r') as temp: - header = temp.read() - return header - -def generate_list_footer(): - cwd = os.getcwd() - footer = '' - with open(os.path.join(cwd, LIST_FOOTER_TEMPLATE), 'r') as temp: - footer = temp.read() - return footer - -def generate_list_row(message, iconName): - cwd = os.getcwd() - row = '' - with open(os.path.join(cwd, LIST_ROW_TEMPLATE), 'r') as temp: - row = temp.read() - row = re.sub('ROW_MESSAGE', message, row) - row = re.sub('ICON_NAME', iconName, row) - return row - -def generate_list_sub_header(): - cwd = os.getcwd() - header = '' - with open(os.path.join(cwd, LIST_SUB_HEADER_TEMPLATE), 'r') as temp: - header = temp.read() - return header - -def generate_list_sub_footer(): - cwd = os.getcwd() - footer = '' - with open(os.path.join(cwd, LIST_SUB_FOOTER_TEMPLATE), 'r') as temp: - footer = temp.read() - return footer - -# bagdeColor can be 'primary', 'secondary', 'success', 'danger', 'warning', 'info', 'light', 'dark' -def generate_list_sub_row(message, nbInBadge, bagdeColor): - cwd = os.getcwd() - row = '' - with open(os.path.join(cwd, LIST_SUB_ROW_TEMPLATE), 'r') as temp: - row = temp.read() - row = re.sub('ROW_MESSAGE', message, row) - row = re.sub('NUMBER', nbInBadge, row) - row = re.sub('LEVEL', bagdeColor, row) - return row diff --git a/ci-scripts/generate_spgwu-tiny_config_script.py b/ci-scripts/generate_spgwu-tiny_config_script.py deleted file mode 100755 index 620ea7683d0e946b314d22238e58345d33ef3470..0000000000000000000000000000000000000000 --- a/ci-scripts/generate_spgwu-tiny_config_script.py +++ /dev/null @@ -1,213 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. 
-# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. -# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- - -import os -import re -import sys - -class spgwuConfigGen(): - def __init__(self): - self.kind = '' - self.s1u_name = '' - self.sgi_name = '' - self.sxu_name = '' - self.spgwc0_ip_addr = '' - self.pdn_list = '' - self.prefix = '' - self.fromDockerFile = False - - def GenerateSpgwuConfigurer(self): - pdns = self.pdn_list.split(); - conf_file = open('./spgw_u.conf', 'w') - conf_file.write('# generated by generate_spgwu-tiny_config_scripts.py\n') - conf_file.write('SPGW-U =\n') - conf_file.write('{\n') - conf_file.write(' INSTANCE = 0; # 0 is the default\n') - conf_file.write(' PID_DIRECTORY = "/var/run"; # /var/run is the default\n') - conf_file.write(' #ITTI_TASKS :\n') - conf_file.write(' #{\n') - conf_file.write(' #ITTI_TIMER_SCHED_PARAMS :\n') - conf_file.write(' #{\n') - conf_file.write(' #CPU_ID = 1;\n') - conf_file.write(' #SCHED_POLICY = "SCHED_FIFO"; # Values in { SCHED_OTHER, SCHED_IDLE, SCHED_BATCH, SCHED_FIFO, SCHED_RR }\n') - conf_file.write(' #SCHED_PRIORITY = 85;\n') - conf_file.write(' #};\n') - conf_file.write(' #S1U_SCHED_PARAMS :\n') - conf_file.write(' #{\n') - conf_file.write(' #CPU_ID = 1;\n') - conf_file.write(' #SCHED_POLICY = "SCHED_FIFO"; # Values in { SCHED_OTHER, SCHED_IDLE, SCHED_BATCH, SCHED_FIFO, SCHED_RR }\n') - conf_file.write(' #SCHED_PRIORITY = 84;\n') - conf_file.write(' #};\n') - conf_file.write(' #SX_SCHED_PARAMS :\n') - conf_file.write(' #{\n') - conf_file.write(' #CPU_ID = 1;\n') - conf_file.write(' #SCHED_POLICY = "SCHED_FIFO"; # Values in { SCHED_OTHER, SCHED_IDLE, SCHED_BATCH, SCHED_FIFO, SCHED_RR }\n') - conf_file.write(' #SCHED_PRIORITY = 84;\n') - conf_file.write(' #};\n') - conf_file.write(' #ASYNC_CMD_SCHED_PARAMS :\n') - conf_file.write(' #{\n') - conf_file.write(' #CPU_ID = 1;\n') - conf_file.write(' #SCHED_POLICY = "SCHED_FIFO"; # Values in { SCHED_OTHER, SCHED_IDLE, SCHED_BATCH, SCHED_FIFO, SCHED_RR }\n') - conf_file.write(' #SCHED_PRIORITY = 84;\n') - conf_file.write(' #};\n') - conf_file.write(' #};\n') - conf_file.write(' INTERFACES :\n') - conf_file.write(' {\n') - conf_file.write(' S1U_S12_S4_UP :\n') - conf_file.write(' {\n') - conf_file.write(' # S-GW binded interface for S1-U communication (GTPV1-U) can be ethernet interface, virtual ethernet interface, we don\'t advise wireless interfaces\n') - conf_file.write(' INTERFACE_NAME = "'+self.s1u_name+'"; # STRING, interface name, YOUR NETWORK CONFIG HERE\n') - conf_file.write(' IPV4_ADDRESS = "read"; # STRING, CIDR or "read to let app read interface configured IP address\n') - conf_file.write(' #PORT = 2152; # Default is 2152\n') - conf_file.write(' #SCHED_PARAMS :\n') - conf_file.write(' #{\n') - conf_file.write(' #CPU_ID = 2;\n') - conf_file.write(' #SCHED_POLICY = "SCHED_FIFO"; # Values in { SCHED_OTHER, SCHED_IDLE, SCHED_BATCH, SCHED_FIFO, SCHED_RR }\n') - conf_file.write(' 
#SCHED_PRIORITY = 98;\n') - conf_file.write(' #};\n') - conf_file.write(' };\n') - conf_file.write(' SX :\n') - conf_file.write(' {\n') - conf_file.write(' # S/P-GW binded interface for SX communication\n') - conf_file.write(' INTERFACE_NAME = "'+self.sxu_name+'"; # STRING, interface name\n') - conf_file.write(' IPV4_ADDRESS = "read"; # STRING, CIDR or "read" to let app read interface configured IP address\n') - conf_file.write(' #PORT = 8805; # Default is 8805\n') - conf_file.write(' #SCHED_PARAMS :\n') - conf_file.write(' #{\n') - conf_file.write(' #CPU_ID = 1;\n') - conf_file.write(' #SCHED_POLICY = "SCHED_FIFO"; # Values in { SCHED_OTHER, SCHED_IDLE, SCHED_BATCH, SCHED_FIFO, SCHED_RR }\n') - conf_file.write(' #SCHED_PRIORITY = 95;\n') - conf_file.write(' #};\n') - conf_file.write(' };\n') - conf_file.write(' SGI :\n') - conf_file.write(' {\n') - conf_file.write(' # No config to set, the software will set the SGi interface to the interface used for the default route.\n') - conf_file.write(' INTERFACE_NAME = "'+self.sgi_name+'"; # STRING, interface name or "default_gateway"\n') - conf_file.write(' IPV4_ADDRESS = "read"; # STRING, CIDR or "read" to let app read interface configured IP address\n') - conf_file.write(' #SCHED_PARAMS :\n') - conf_file.write(' #{\n') - conf_file.write(' #CPU_ID = 3;\n') - conf_file.write(' #SCHED_POLICY = "SCHED_FIFO"; # Values in { SCHED_OTHER, SCHED_IDLE, SCHED_BATCH, SCHED_FIFO, SCHED_RR }\n') - conf_file.write(' #SCHED_PRIORITY = 98;\n') - conf_file.write(' #};\n') - conf_file.write(' };\n') - conf_file.write(' };\n') - conf_file.write(' PDN_NETWORK_LIST = (\n') - for pdn in pdns[ 0:len(pdns)-1 ]: - conf_file.write(' {NETWORK_IPV4 = "'+pdn+'"; SNAT = "no";},\n') - pdn = pdns[len(pdns) - 1] - conf_file.write(' {NETWORK_IPV4 = "'+pdn+'"; SNAT = "no";}\n') - conf_file.write(' );\n') - conf_file.write(' SPGW-C_LIST = (\n') - conf_file.write(' {IPV4_ADDRESS="' + self.spgwc0_ip_addr + '" ;}\n') - conf_file.write(' );\n') - conf_file.write('};\n') - conf_file.close() - -#----------------------------------------------------------- -# Usage() -#----------------------------------------------------------- -def Usage(): - print('--------------------------------------------------------------------') - print('generate_spgwu-tiny_config_scripts.py') - print(' Prepare a bash script to be run in the workspace where SPGW-U-TINY is being built.') - print(' That bash script will copy configuration template files and adapt to your configuration.') - print('--------------------------------------------------------------------') - print('Usage: python3 generate_spgwu-tiny_config_scripts.py [options]') - print(' --help Show this help.') - print('--------------- SPGW-U Options -----') - print(' --kind=SPGW-U') - print(' --sxc_ip_addr=[SPGW-C SX IP address]') - print(' --sxu=[SPGW-U SX Interface Name]') - print(' --s1u=[SPGW-U S1-U Interface Name]') - print(' --sgi=[SPGW-U SGi Interface Name]') - print(' --pdn_list=["PDNs"]') - print(' --prefix=["Prefix for configuration files"]') - print(' --from_docker_file') - -argvs = sys.argv -argc = len(argvs) -cwd = os.getcwd() - -mySpgwuCfg = spgwuConfigGen() - -while len(argvs) > 1: - myArgv = argvs.pop(1) - if re.match('^\-\-help$', myArgv, re.IGNORECASE): - Usage() - sys.exit(0) - elif re.match('^\-\-kind=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-kind=(.+)$', myArgv, re.IGNORECASE) - mySpgwuCfg.kind = matchReg.group(1) - elif re.match('^\-\-sxu=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-sxu=(.+)$', 
myArgv, re.IGNORECASE) - mySpgwuCfg.sxu_name = matchReg.group(1) - elif re.match('^\-\-sxc_ip_addr=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-sxc_ip_addr=(.+)$', myArgv, re.IGNORECASE) - mySpgwuCfg.spgwc0_ip_addr = matchReg.group(1) - elif re.match('^\-\-s1u=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-s1u=(.+)$', myArgv, re.IGNORECASE) - mySpgwuCfg.s1u_name = matchReg.group(1) - elif re.match('^\-\-sgi=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-sgi=(.+)$', myArgv, re.IGNORECASE) - mySpgwuCfg.sgi_name = matchReg.group(1) - elif re.match('^\-\-pdn_list=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-pdn_list=(.+)$', myArgv, re.IGNORECASE) - mySpgwuCfg.pdn_list = str(matchReg.group(1)) - elif re.match('^\-\-prefix=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-prefix=(.+)$', myArgv, re.IGNORECASE) - mySpgwuCfg.prefix = matchReg.group(1) - elif re.match('^\-\-from_docker_file', myArgv, re.IGNORECASE): - mySpgwuCfg.fromDockerFile = True - else: - Usage() - sys.exit('Invalid Parameter: ' + myArgv) - -if mySpgwuCfg.kind == '': - Usage() - sys.exit('missing kind parameter') - -if mySpgwuCfg.kind == 'SPGW-U': - if mySpgwuCfg.sxu_name == '': - Usage() - sys.exit('missing SX Interface Name on SPGW-U container') - elif mySpgwuCfg.s1u_name == '': - Usage() - sys.exit('missing S1-U Interface Name on SPGW-U container') - elif mySpgwuCfg.sgi_name == '': - Usage() - sys.exit('missing SGi Interface Name on SPGW-U container') - elif mySpgwuCfg.pdn_list == '': - Usage() - sys.exit('missing pdn_list') - elif mySpgwuCfg.spgwc0_ip_addr == '': - Usage() - sys.exit('missing SPGW-C #0 IP address on SX interface') - elif mySpgwuCfg.prefix == '': - Usage() - sys.exit('missing prefix') - else: - mySpgwuCfg.GenerateSpgwuConfigurer() - sys.exit(0) -else: - Usage() - sys.exit('invalid kind parameter') diff --git a/ci-scripts/helmDeploy.py b/ci-scripts/helmDeploy.py deleted file mode 100644 index 47e9d03ddbc16db1821772449e787b0610e174db..0000000000000000000000000000000000000000 --- a/ci-scripts/helmDeploy.py +++ /dev/null @@ -1,367 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. -# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. 
-# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- -# -# Required Python Version -# Python 3.x -# -# Required Python Package -# pexpect -#--------------------------------------------------------------------- - -#----------------------------------------------------------- -# Import -#----------------------------------------------------------- -import logging -import html -import os -import re -import time -import sys -import subprocess - -logging.basicConfig( - level=logging.DEBUG, - format="[%(asctime)s] %(name)s:%(levelname)s: %(message)s" -) - -class ClusterDeploy: - def __init__(self): - self.OCUserName = "" - self.OCPassword = "" - self.OCProjectName = "" - self.imageTags = "" - self.mode = "" - -#-----------------$ -#PUBLIC Methods$ -#-----------------$ - - def Deploy_5gcn(self): - ocUserName = self.OCUserName - ocPassword = self.OCPassword - ocProjectName = self.OCProjectName - limageTags = self.imageTags - if ocUserName == '' or ocPassword == '' or ocProjectName == '' or limageTags == '': - sys.exit('Insufficient Parameter') - - logging.debug('\u001B[1m Checking all 5GCN component IMAGES are pre existing and correct\u001B[0m') - images = limageTags.split(',') - for image in images: - eachImage = image.split(':') - imageName = eachImage[0] - imageTag = eachImage[1] - if imageName == 'mysql': - continue - # Check if image is exist on the Red Hat server, before pushing it to OC cluster - subprocess.run(f'echo "IMAGENAME_TAG: {imageName}:{imageTag}" > archives/{imageName}_image_info.log', shell=True) - res = subprocess.check_output("sudo podman image inspect --format='Size = {{.Size}} bytes' " + f'{imageName}:{imageTag} | tee -a archives/{imageName}_image_info.log', shell=True, universal_newlines=True) - subprocess.run("sudo podman image inspect --format='Date = {{.Created}}' " + f'{imageName}:{imageTag} >> archives/{imageName}_image_info.log', shell=True) - res2 = re.search('no such image', str(res.strip())) - if res2 is not None: - logging.error(f'\u001B[1m No such image {imageName}]\u001B[0m') - sys.exit(-1) - else: - res2 = re.search('Size *= *(?P<size>[0-9\-]+) *bytes', str(res.strip())) - if res2 is not None: - imageSize = float(res2.group('size')) - imageSize = imageSize / 1000 - if imageSize < 1000: - logging.debug(f'\u001B[1m {imageName} size is ' + ('%.0f' % imageSize) + ' kbytes\u001B[0m') - else: - imageSize = imageSize / 1000 - if imageSize < 1000: - logging.debug(f'\u001B[1m {imageName} size is ' + ('%.0f' % imageSize) + ' Mbytes\u001B[0m') - else: - imageSize = imageSize / 1000 - logging.debug(f'\u001B[1m {imageName} is ' + ('%.3f' % imageSize) + ' Gbytes\u001B[0m') - else: - logging.debug(f'{imageName} size is unknown') - - # logging to OC Cluster and then switch to corresponding project - res = subprocess.check_output(f'oc login -u {ocUserName} -p {ocPassword}', shell=True, universal_newlines=True) - res2 = re.search('Login successful.', str(res.strip())) - if res2 is None: - logging.error('\u001B[1m OC Cluster Login Failed\u001B[0m') - sys.exit(-1) - else: - logging.debug('\u001B[1m Login to OC Cluster Successfully\u001B[0m') - res = subprocess.check_output(f'oc project {ocProjectName}', shell=True, universal_newlines=True) - res2 = re.search(f'Already on project "{ocProjectName}"', str(res.strip())) - res3 = re.search(f'Now using project 
"{self.OCProjectName}"', str(res.strip())) - if res2 is None and res3 is None: - logging.error(f'\u001B[1m Unable to access OC project {ocProjectName}\u001B[0m') - sys.exit(-1) - else: - logging.debug(f'\u001B[1m Now using project {ocProjectName}\u001B[0m') - - # Tag the image and push to the OC cluster - res = subprocess.check_output('oc whoami -t | sudo podman login -u ' + ocUserName + ' --password-stdin https://default-route-openshift-image-registry.apps.5glab.nsa.eurecom.fr/ --tls-verify=false', shell=True, universal_newlines=True) - res2 = re.search('Login Succeeded!', str(res.strip())) - if res2 is None: - logging.error('\u001B[1m Podman Login to OC Cluster Registry Failed\u001B[0m') - sys.exit(-1) - else: - logging.debug('\u001B[1m Podman Login to OC Cluster Registry Successfully\u001B[0m') - for image in images: - eachImage = image.split(':') - imageName = eachImage[0] - imageTag = eachImage[1] - if imageName == 'mysql': - continue - res = subprocess.check_output(f'oc create -f openshift/{imageName}-image-stream.yml 2>&1 | tee -a archives/5gcn_imagestream_summary.txt || true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res2 = re.search('already exists', str(res.strip())) - res3 = re.search('created', str(res.strip())) - if res2 is None and res3 is None: - logging.error(f'\u001B[1m Image Stream "{imageName}" Creation Failed on OC project {ocProjectName}\u001B[0m') - sys.exit(-1) - else: - logging.debug(f'\u001B[1m Image Stream "{imageName}" created on OC project {ocProjectName}\u001B[0m') - subprocess.run(f'sudo podman tag {imageName}:{imageTag} default-route-openshift-image-registry.apps.5glab.nsa.eurecom.fr/{ocProjectName}/{imageName}:{imageTag}', shell=True) - res = subprocess.check_output(f'sudo podman push default-route-openshift-image-registry.apps.5glab.nsa.eurecom.fr/{ocProjectName}/{imageName}:{imageTag} --tls-verify=false 2>&1 | tee -a archives/5gcn_imagepush_summary.txt', shell=True, universal_newlines=True) - time.sleep(10) - res2 = re.search('Storing signatures', str(res.strip())) - if res2 is None: - logging.error(f'\u001B[1m Image "{imageName}" push to OC Cluster Registry Failed\u001B[0m') - sys.exit(-1) - else: - logging.debug(f'\u001B[1m Image "{imageName}" push to OC Cluster Registry Successfully\u001B[0m') - - # Using helm charts deployment - logging.debug(f'\u001B[1m Deploying 5GCN Components on the Cluster using Helm Charts\u001B[0m') - passPods = 0 - time.sleep(5) - for image in images: - eachImage = image.split(':') - imageName = eachImage[0] - imageTag = eachImage[1] - nameSufix = '' - subprocess.run(f'sed -i -e "s#PROJECT#{ocProjectName}#g" ./ci-scripts/charts/{imageName}/values.yaml', shell=True) - subprocess.run(f'sed -i -e "s#TAG#{imageTag}#g" ./ci-scripts/charts/{imageName}/values.yaml', shell=True) - if imageName == 'oai-nrf': - nameSufix = 'nrf' - if imageName == 'oai-udr': - nameSufix = 'udr' - if imageName == 'oai-udm': - nameSufix = 'udm' - if imageName == 'oai-ausf': - nameSufix = 'ausf' - elif imageName == 'oai-amf': - nameSufix = 'amf' - elif imageName == 'oai-smf': - nameSufix = 'smf' - elif imageName == 'oai-spgwu-tiny': - nameSufix = 'spgwu' - res = subprocess.check_output(f'helm install {imageName} ./ci-scripts/charts/{imageName}/ | tee -a archives/5gcn_helm_summary.txt 2>&1', shell=True, universal_newlines=True) - res2 = re.search('STATUS: deployed', str(res.strip())) - if res2 is None: - subprocess.run(f'echo "{imageName}: HELM KO" >> archives/5gcn_helm_summary.txt 2>&1', shell=True) - logging.error(f'\u001B[1m 
Deploying "{imageName}" Failed using helm chart on OC Cluster\u001B[0m') - logging.error(f'\u001B[1m 5GCN Deployment: KO \u001B[0m') - subprocess.run(f'echo "DEPLOYMENT: KO" > archives/deployment_status.log', shell=True) - sys.exit(-1) - else: - subprocess.run(f'echo "{imageName}: HELM OK" >> archives/5gcn_helm_summary.txt 2>&1', shell=True) - logging.debug(f'\u001B[1m Deployed "{imageName}" Successfully using helm chart\u001B[0m') - time.sleep(20) - res = subprocess.check_output(f'oc get pods -o wide -l app.kubernetes.io/instance={imageName} | tee -a archives/5gcn_pods_summary.txt', shell=True, universal_newlines=True) - res2 = re.search(f'{imageName}[\S\d\w]+', str(res.strip())) - podName = res2.group(0) - isRunning = False - count = 0 - while count < 12 and isRunning == False: - time.sleep(5) - if imageName == 'mysql': - res = subprocess.check_output(f'oc exec {podName} -i -t -- mysqladmin -u root --password=linux ping || true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - else: - res = subprocess.check_output(f'oc exec {podName} -c {nameSufix} -- ps aux || true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res2 = re.search(f'oai_{nameSufix}', str(res.strip())) - res3 = re.search(f'mysqld is alive', str(res.strip())) - if res2 is not None or res3 is not None: - logging.debug(f'\u001B[1m POD "{imageName}" Service Running Sucessfully\u001B[0m') - subprocess.run(f'echo "{imageName}: POD OK" >> archives/5gcn_pods_summary.txt 2>&1', shell=True) - isRunning = True - passPods += 1 - count +=1 - if isRunning == False: - logging.error(f'\u001B[1m POD "{imageName}" Service Running FAILED \u001B[0m') - subprocess.run(f'echo "{imageName}: POD KO" >> archives/5gcn_pods_summary.txt 2>&1', shell=True) - sys.exit(-1) - - if passPods == 8: - logging.debug(f'\u001B[1m 5GCN Deployment: OK \u001B[0m') - subprocess.run(f'echo "DEPLOYMENT: OK" > archives/deployment_status.log', shell=True) - else: - logging.error(f'\u001B[1m 5GCN Deployment: KO \u001B[0m') - subprocess.run(f'echo "DEPLOYMENT: KO" > archives/deployment_status.log', shell=True) - sys.exit(-1) - subprocess.run('oc logout', shell=True) - - def UnDeploy_5gcn(self): - logging.debug('\u001B[1m UnDeploying the 5GCN\u001B[0m') - # logging to OC Cluster and then switch to corresponding project - res = subprocess.check_output(f'oc login -u {self.OCUserName} -p {self.OCPassword}|| true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res2 = re.search(f'Login successful.', str(res.strip())) - if res2 is None: - logging.error('\u001B[1m OC Cluster Login Failed\u001B[0m') - sys.exit(-1) - res = subprocess.check_output(f'oc project {self.OCProjectName} || true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res2 = re.search(f'Already on project "{self.OCProjectName}"', str(res.strip())) - res3 = re.search(f'Now using project "{self.OCProjectName}"', str(res.strip())) - if res2 is None and res3 is None: - logging.error(f'\u001B[1m Unable to access OC project {self.OCProjectName}\u001B[0m') - sys.exit(-1) - - # UnDeploy the 5gcn pods - images = self.imageTags.split(',') - for image in images: - eachImage = image.split(':') - imageName = eachImage[0] - imageTag = eachImage[1] - res = subprocess.check_output(f'helm uninstall {imageName} | tee -a archives/5gcn_helm_summary.txt 2>&1', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res2 = re.search(f'release "{imageName}" uninstalled', str(res.strip())) - res3 = re.search(f'Error: uninstall: Release not loaded: 
{imageName}: release: not found', str(res.strip())) - if res2 is not None: - logging.debug(f'\u001B[1m UnDeployed "{imageName}" Successfully on OC Cluster\u001B[0m') - elif res3 is None: - logging.error(f'\u001B[1m UnDeploying "{imageName}" Failed using helm chart on OC Cluster\u001B[0m') - time.sleep(2) - # Delete images and imagestream - if imageName == 'mysql': - continue - res = subprocess.check_output(f'sudo podman rmi default-route-openshift-image-registry.apps.5glab.nsa.eurecom.fr/{self.OCProjectName}/{imageName}:{imageTag} || true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res2 = re.search('in local storage: no such image', str(res.strip())) - res = subprocess.check_output(f'oc delete is {imageName} || true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res3 = re.search('Error from server (NotFound):', str(res.strip())) - if res2 is None and res3 is None: - logging.debug(f'\u001B[1m Deleted the "{imageName}" Image and ImageStream\u001B[0m') - subprocess.run('oc logout', shell=True) - - def GetLogsConfigsPcaps(self): - # copying the pod's logs, configs, pcaps - logging.debug(f'\u001B[1m Copying the PODs log, config & pcap\u001B[0m') - res = subprocess.check_output(f'oc login -u {self.OCUserName} -p {self.OCPassword}|| true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res2 = re.search(f'Login successful.', str(res.strip())) - if res2 is None: - logging.error('\u001B[1m OC Cluster Login Failed\u001B[0m') - sys.exit(-1) - res = subprocess.check_output(f'oc project {self.OCProjectName} || true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res2 = re.search(f'Already on project "{self.OCProjectName}"', str(res.strip())) - res3 = re.search(f'Now using project "{self.OCProjectName}"', str(res.strip())) - if res2 is None and res3 is None: - logging.error(f'\u001B[1m Unable to access OC project {self.OCProjectName}\u001B[0m') - sys.exit(-1) - subprocess.run(f'mkdir -p archives/pcap', shell=True) - subprocess.run(f'mkdir -p archives/logs', shell=True) - subprocess.run(f'mkdir -p archives/config', shell=True) - images = self.imageTags.split(',') - for image in images: - eachImage = image.split(':') - imageName = eachImage[0] - imageTag = eachImage[1] - if imageName == 'mysql': - continue - if imageName == 'oai-nrf': - nameSufix = 'nrf' - if imageName == 'oai-udr': - nameSufix = 'udr' - if imageName == 'oai-udm': - nameSufix = 'udm' - if imageName == 'oai-ausf': - nameSufix = 'ausf' - elif imageName == 'oai-amf': - nameSufix = 'amf' - elif imageName == 'oai-smf': - nameSufix = 'smf' - elif imageName == 'oai-spgwu-tiny': - nameSufix = 'spgwu' - nameSufix1 = 'spgwu-tiny' - res = subprocess.check_output(f'oc get pods -o wide -l app.kubernetes.io/instance={imageName} || true', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - res2 = re.search(f'{imageName}[\S\d\w]+', str(res.strip())) - podName = res2.group(0) - subprocess.run(f'oc logs {podName} {nameSufix} > archives/logs/{imageName}_pod.log', shell=True) - time.sleep(1) - if imageName == 'oai-spgwu-tiny': - subprocess.run(f'oc cp {podName}:/openair-{nameSufix1}/etc/ archives/config/ -c {nameSufix}', shell=True) - subprocess.run(f'oc cp {podName}:/pcap/ archives/pcap/ -c tcpdump', shell=True) - else: - subprocess.run(f'oc cp {podName}:/openair-{nameSufix}/etc/ archives/config/ -c {nameSufix}', shell=True) - subprocess.run('oc logout', shell=True) - -def Usage(): - 
print('----------------------------------------------------------------------------------------------------------------------') - print('helmDeploy.py') - print(' Deploy and UnDeploy the 5gcn components on the Cluster.') - print('----------------------------------------------------------------------------------------------------------------------') - print('Usage: python3 helmDeploy.py [options]') - print(' --help Show this help.') - print('---------------------------------------------------------------------------------------------- Mandatory Options -----') - print(' --mode=[Deploy/UnDeploy]') - print(' --OCUserName=[Cluster UserName]') - print(' --OCPassword=[Cluster Password]') - print(' --OCProjectName=[Cluster Project name]') - print(' --imageTags=[Image tags of all the 5gcn components]') - print('------------------------------------------------------------------------------------------------- Actions Syntax -----') - print('python3 helmDeploy.py --mode=Deploy [Mandatory Options]') - print('python3 helmDeploy.py --mode=UnDeploy [Mandatory Options]') - print('python3 helmDeploy.py --mode=GetLogs [Mandatory Options]') - - - -#-------------------------------------------------------------------------------------------------------- -# -# Start of main -# -#-------------------------------------------------------------------------------------------------------- - -CN = ClusterDeploy() - -argvs = sys.argv - -while len(argvs) > 1: - myArgv = argvs.pop(1) - if re.match('^\-\-help$', myArgv, re.IGNORECASE): - Usage() - sys.exit(0) - elif re.match('^\-\-mode=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-mode=(.+)$', myArgv, re.IGNORECASE) - CN.mode = matchReg.group(1) - elif re.match('^\-\-OCUserName=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-OCUserName=(.+)$', myArgv, re.IGNORECASE) - CN.OCUserName = matchReg.group(1) - elif re.match('^\-\-OCPassword=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-OCPassword=(.+)$', myArgv, re.IGNORECASE) - CN.OCPassword = matchReg.group(1) - elif re.match('^\-\-OCProjectName=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-OCProjectName=(.+)$', myArgv, re.IGNORECASE) - CN.OCProjectName = matchReg.group(1) - elif re.match('^\-\-imageTags=(.+)$', myArgv, re.IGNORECASE): - matchReg = re.match('^\-\-imageTags=(.+)$', myArgv, re.IGNORECASE) - CN.imageTags = matchReg.group(1) - else: - sys.exit('Invalid Parameter: ' + myArgv) - -if CN.mode == 'Deploy': - CN.Deploy_5gcn() -elif CN.mode == 'UnDeploy': - CN.UnDeploy_5gcn() -elif CN.mode == 'GetLogs': - CN.GetLogsConfigsPcaps() diff --git a/ci-scripts/html-templates/button-footer.htm b/ci-scripts/html-templates/button-footer.htm deleted file mode 100644 index a566729543a6737a5cc00ffb78c4ff4da16dd14d..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/button-footer.htm +++ /dev/null @@ -1,2 +0,0 @@ - </div> - <br> diff --git a/ci-scripts/html-templates/button-header.htm b/ci-scripts/html-templates/button-header.htm deleted file mode 100644 index ed1ea92ce4bd1f254bb54671824fd9ccb594c9b2..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/button-header.htm +++ /dev/null @@ -1,5 +0,0 @@ - <br> - <button data-toggle="collapse" data-target="#BUTTON_NAME">BUTTON_MESSAGE</button> - <br> - <div id="BUTTON_NAME" class="collapse"> - <br> diff --git a/ci-scripts/html-templates/chapter-header.htm b/ci-scripts/html-templates/chapter-header.htm deleted file mode 100644 index 
1326408fae5cfa13c1cb14aeb7eff4bf2943b760..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/chapter-header.htm +++ /dev/null @@ -1,4 +0,0 @@ -<h2>CHAPTER_NAME</h2> -<div class="alert alert-ALERT_LEVEL"> - <strong>MESSAGE <span class="glyphicon glyphicon-warning-sign"></span></strong> -</div> diff --git a/ci-scripts/html-templates/command-table-footer.htm b/ci-scripts/html-templates/command-table-footer.htm deleted file mode 100644 index 005daf3e2502bfc952fbb6008b006aa0f1c13367..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/command-table-footer.htm +++ /dev/null @@ -1,2 +0,0 @@ - </table> - <br> diff --git a/ci-scripts/html-templates/command-table-header.htm b/ci-scripts/html-templates/command-table-header.htm deleted file mode 100644 index c8322a8b486efa31984b7290149b0c7cc047640a..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/command-table-header.htm +++ /dev/null @@ -1,5 +0,0 @@ - <table class="table-bordered" width = "100%" align = "center" border = 1> - <tr bgcolor = "#33CCFF" > - <th>Command</th> - <th>Status</th> - </tr> diff --git a/ci-scripts/html-templates/command-table-row.htm b/ci-scripts/html-templates/command-table-row.htm deleted file mode 100644 index 54920a35f3d7b172866513105e7150e25d232b06..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/command-table-row.htm +++ /dev/null @@ -1,4 +0,0 @@ - <tr> - <td>COMMAND</td> - <td bgcolor="COLOR">STATUS</td> - </tr> diff --git a/ci-scripts/html-templates/file-footer.htm b/ci-scripts/html-templates/file-footer.htm deleted file mode 100644 index 4d9246de71f35e2f3c30a7111b9c33d8b69f669f..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/file-footer.htm +++ /dev/null @@ -1,4 +0,0 @@ - <div class="well well-lg">End of Test Report -- Copyright <span class="glyphicon glyphicon-copyright-mark"></span> 2023 <a href="http://www.openairinterface.org/">OpenAirInterface</a>. 
All Rights Reserved.</div> -</div></body> -</html> - diff --git a/ci-scripts/html-templates/image-table-footer.htm b/ci-scripts/html-templates/image-table-footer.htm deleted file mode 100644 index 005daf3e2502bfc952fbb6008b006aa0f1c13367..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/image-table-footer.htm +++ /dev/null @@ -1,2 +0,0 @@ - </table> - <br> diff --git a/ci-scripts/html-templates/image-table-header.htm b/ci-scripts/html-templates/image-table-header.htm deleted file mode 100644 index 1a3ae7f425fe09742d90232751c210c1ab875d66..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/image-table-header.htm +++ /dev/null @@ -1,8 +0,0 @@ - <table class="table-bordered" width = "100%" align = "center" border = 1> - <tr bgcolor = "#33CCFF" > - <th>Container Name</th> - <th>Used Image Tag</th> - <th>OC Image Tag</th> - <th>Image Creation Date</th> - <th>Used Image Size</th> - </tr> diff --git a/ci-scripts/html-templates/image-table-row.htm b/ci-scripts/html-templates/image-table-row.htm deleted file mode 100644 index 7b4ea24d3de2bdac9c5c9c6cbca36331418bfde4..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/image-table-row.htm +++ /dev/null @@ -1,7 +0,0 @@ - <tr> - <td>CONTAINER_NAME</td> - <td>IMAGE_TAG</td> - <td>OC_TAG</td> - <td>CREATION_DATE</td> - <td>IMAGE_SIZE</td> - </tr> diff --git a/ci-scripts/html-templates/image-table-sep.htm b/ci-scripts/html-templates/image-table-sep.htm deleted file mode 100644 index d5ae42a986359be1e66d52bde24f80a8d8d20f2b..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/image-table-sep.htm +++ /dev/null @@ -1,3 +0,0 @@ - <tr bgcolor = "#b0bec5"> - <td colspan="5"> </td> - </tr> diff --git a/ci-scripts/html-templates/list-footer.htm b/ci-scripts/html-templates/list-footer.htm deleted file mode 100644 index 9764941745673aa74779ca62fae24b90dcbce2fa..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/list-footer.htm +++ /dev/null @@ -1 +0,0 @@ - </ul> diff --git a/ci-scripts/html-templates/list-header.htm b/ci-scripts/html-templates/list-header.htm deleted file mode 100644 index 4f783507761b80f31fb29326014065b5b643456d..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/list-header.htm +++ /dev/null @@ -1,2 +0,0 @@ - <br> - <ul class="list-group"> diff --git a/ci-scripts/html-templates/list-row.htm b/ci-scripts/html-templates/list-row.htm deleted file mode 100644 index cea48a26ebfdee2a844f7c8f452debe51851a74b..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/list-row.htm +++ /dev/null @@ -1 +0,0 @@ - <li class="list-group-item"><i class="glyphicon glyphicon-ICON_NAME"></i> ROW_MESSAGE</li> diff --git a/ci-scripts/html-templates/list-sub-footer.htm b/ci-scripts/html-templates/list-sub-footer.htm deleted file mode 100644 index 97fe195a605dcfd7701c4c3a9dd4523e46784d39..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/list-sub-footer.htm +++ /dev/null @@ -1 +0,0 @@ - </ul> diff --git a/ci-scripts/html-templates/list-sub-header.htm b/ci-scripts/html-templates/list-sub-header.htm deleted file mode 100644 index c9b351fd3f1e3cba43d595871b570c5f54384f47..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/list-sub-header.htm +++ /dev/null @@ -1 +0,0 @@ - <ul> diff --git a/ci-scripts/html-templates/list-sub-row.htm b/ci-scripts/html-templates/list-sub-row.htm deleted file mode 100644 index 
304b12e1ace00649c9e89aadd8ac098ada2f32a6..0000000000000000000000000000000000000000 --- a/ci-scripts/html-templates/list-sub-row.htm +++ /dev/null @@ -1 +0,0 @@ - <li class="list-group-item">ROW_MESSAGE <span class="badge badge-LEVEL">NUMBER</span></li> diff --git a/ci-scripts/mysql-script.cmd b/ci-scripts/mysql-script.cmd deleted file mode 100644 index 782f979466bf1f64bc595fe3d591291f1cdb1be1..0000000000000000000000000000000000000000 --- a/ci-scripts/mysql-script.cmd +++ /dev/null @@ -1,6 +0,0 @@ -ALTER USER 'root'@'localhost' IDENTIFIED BY 'linux'; -CREATE DATABASE oai_db; -USE oai_db; -SOURCE /home/oai_db.sql -INSERT INTO users VALUES ('208950000000031','380561234567','55000000000001',NULL,'PURGED',50,40000000,100000000,47,0000000000,1,0x0C0A34601D4F07677303652C0462535B,0,0,0x40,'ebd07771ace8677a',0x63bfa50ee6523365ff14c1f45f88737d); -GRANT ALL ON oai_db.* TO root@CICD_AMF_PUBLIC_ADDR IDENTIFIED BY 'linux'; diff --git a/ci-scripts/retrieveLatestTagOnPrivateRepo.py b/ci-scripts/retrieveLatestTagOnPrivateRepo.py index 760d0b6a0a7dd210ddd829806308b6fce017d4f7..c8a6096d48c31531aaebf9db270fcb71503ce2da 100755 --- a/ci-scripts/retrieveLatestTagOnPrivateRepo.py +++ b/ci-scripts/retrieveLatestTagOnPrivateRepo.py @@ -25,8 +25,8 @@ import argparse from datetime import datetime import logging import re -import subprocess import sys +import common.python.cls_cmd as cls_cmd logging.basicConfig( level=logging.INFO, @@ -45,11 +45,12 @@ def main() -> None: tagRoot = 'develop' nbChars = 15 + myCmds = cls_cmd.LocalCmd() cmd = f'curl --insecure -Ss -u oaicicd:oaicicd {PRIVATE_LOCAL_REGISTRY_URL}/v2/{args.repo_name}/tags/list | jq .' - tagList = run_cmd(cmd) + tagList = myCmds.run(cmd, silent=True) latestTag = '' latestDate = datetime.strptime('2022-01-01T00:00:00', '%Y-%m-%dT%H:%M:%S') - for line in tagList.split('\n'): + for line in tagList.stdout.split('\n'): res = re.search(f'"(?P<tag>{tagRoot}-[0-9a-zA-Z]+)"', line) if res is not None: tag = res.group('tag') @@ -57,14 +58,14 @@ def main() -> None: # on other NF / GitLab `git log -1 --pretty=format:"%h"` returns 8 characters if len(tag) == nbChars or len(tag) == (nbChars+1): cmd = f'curl --insecure -Ss -u oaicicd:oaicicd {PRIVATE_LOCAL_REGISTRY_URL}/v2/{args.repo_name}/manifests/{tag} | jq .history' - tagInfo = run_cmd(cmd) - res2 = re.search('"created.*(?P<date>202[0-9-]\-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+).*docker_version', tagInfo) + tagInfo = myCmds.run(cmd, silent=True) + res2 = re.search('"created.*(?P<date>202[0-9-]\-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+).*docker_version', tagInfo.stdout) if res2 is not None: date = datetime.strptime(res2.group('date'), '%Y-%m-%dT%H:%M:%S') if date > latestDate: latestDate = date latestTag = tag - res2 = re.search('"created.*(?P<date>202[0-9-]\-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+).*container_config.*WORKDIR', tagInfo) + res2 = re.search('"created.*(?P<date>202[0-9-]\-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+).*container_config.*WORKDIR', tagInfo.stdout) if res2 is not None: date = datetime.strptime(res2.group('date'), '%Y-%m-%dT%H:%M:%S') if date > latestDate: @@ -92,19 +93,5 @@ def _parse_args() -> argparse.Namespace: return parser.parse_args() -def run_cmd(cmd, silent=True): - if not silent: - logging.info(cmd) - result = None - try: - res = subprocess.run(cmd, - shell=True, check=True, - stdout=subprocess.PIPE, - universal_newlines=True) - result = res.stdout.strip() - except: - pass - return result - if __name__ == '__main__': main() diff --git a/ci-scripts/routeCheck.py b/ci-scripts/routeCheck.py deleted file mode 
100644 index c0626df70644c0657fab7b5c5249713204b2ed20..0000000000000000000000000000000000000000 --- a/ci-scripts/routeCheck.py +++ /dev/null @@ -1,157 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. -# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. -# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- - -import os -import re -import sys -import subprocess -import yaml -import argparse - -class IpRouteCheck(): - def __init__(self): - self.mode = '' - self.userName = '' - self.hostName = '' - self.file = '' - self.subnet = '' - self.gatwayIp = '' - self.interfaceName = '' - - def _parse_args(self) -> argparse.Namespace: - """Parse the command line args - - Returns: - argparse.Namespace: the created parser - """ - parser = argparse.ArgumentParser(description='Route Add/Delete on the required Server for CI Deployment') - - # Mode - parser.add_argument( - '--mode', - action='store', - required=True, - choices=['Add', 'Delete'], - help='route Add/Delete', - ) - # Server username - parser.add_argument( - '--userName', - action='store', - required=True, - help='server username', - ) - # Server hostname - parser.add_argument( - '--hostName', - action='store', - required=True, - help='server hostname', - ) - # Docker-compose file to use - parser.add_argument( - '--docker_compose', - action='store', - required=True, - help='Docker-compose file to use', - ) - return parser.parse_args() - - def getSubnet(self): - cmd = f"egrep 'subnet' {self.file}" - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='utf-8') - if ret.returncode == 0: - if ret.stdout is not None: - result = re.search("(?:[0-9]{1,3}[.]){3}[0-9]{1,3}/[0-9]{1,2}", ret.stdout.strip()) - if result is not None: - self.subnet = result.group(0) - #print("found subnet:", self.subnet) - else: - print("subnet not found in docker compose") - sys.exit(-1) - else: - print("docker-compose file not found") - sys.exit(-1) - - def getGatwayIp(self): - cmd = "ifconfig | grep 192.168.18" - ret = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='utf-8') - if ret.returncode == 0: - result = re.search("inet (?:[0-9]{1,3}[.]){3}[0-9]{1,3}", ret.stdout.strip()) - self.gatwayIp = result.group(0) - #print("Gatway IP Address:", self.gatwayIp) - else: - print("No Gatway IP address starts with 196.168.18") - sys.exit(-1) - - def routeCheck(self): - # Get the interface name - cmd = "ip route | grep 192.168.18.0/24 | awk {'print $3'}" - ret = subprocess.run(f'ssh {self.userName}@{self.hostName} 
{cmd} || true', shell=True, stdout=subprocess.PIPE, encoding='utf-8') - #print('interface name:', ret.stdout.strip()) - self.interfaceName = ret.stdout.strip() - # Check whether or not ip route exist - cmd = f'ip route | grep -c "{self.subnet}"' - ret = subprocess.run(f'ssh {self.userName}@{self.hostName} {cmd} || true', shell=True, stdout=subprocess.PIPE, encoding='utf-8') - if ret.stdout is not None: - if ret.stdout.strip() == '1': - #print('Route exits') - if IPRC.mode == 'Delete': - IPRC.routeDel() - else: - sys.exit(0) - else: - #print("Route not found") - if IPRC.mode == 'Add': - IPRC.routeAdd() - else: - sys.exit(0) - - def routeAdd(self): - # Add the route - cmd = f"sudo ip route add {self.subnet} via {self.gatwayIp} dev {self.interfaceName}" - ret = subprocess.run(f'ssh {self.userName}@{self.hostName} {cmd} || true', shell=True, stdout=subprocess.PIPE, encoding='utf-8') - print("Added ip route") - - def routeDel(self): - # Delete the route - cmd = f"sudo ip route del {self.subnet} via {self.gatwayIp} dev {self.interfaceName}" - ret = subprocess.run(f'ssh {self.userName}@{self.hostName} {cmd} || true', shell=True, stdout=subprocess.PIPE, encoding='utf-8') - print("Deleted ip route") - -#-------------------------------------------------------------------------------------------------------- -# -# Start of main -# -#-------------------------------------------------------------------------------------------------------- - -IPRC = IpRouteCheck() -args = IPRC._parse_args() - -IPRC.mode = args.mode -IPRC.userName = args.userName -IPRC.hostName = args.hostName -IPRC.file = args.docker_compose - -IPRC.getSubnet() -IPRC.getGatwayIp() -IPRC.routeCheck() \ No newline at end of file diff --git a/ci-scripts/temp/generate_amf_conf.sh b/ci-scripts/temp/generate_amf_conf.sh deleted file mode 100755 index 7a5dbc1eba757f7dc49d52054491d88a41031bf1..0000000000000000000000000000000000000000 --- a/ci-scripts/temp/generate_amf_conf.sh +++ /dev/null @@ -1,56 +0,0 @@ -# prompt has been removed for easier Ctrl+C Ctrl+V -# please update the following information according to your configuration - -INSTANCE=1 -PREFIX='/openair-amf/etc' - -declare -A AMF_CONF - -AMF_CONF[@INSTANCE@]=$INSTANCE -AMF_CONF[@PID_DIRECTORY@]='/var/run' - -AMF_CONF[@MCC@]='208' -AMF_CONF[@MNC@]='95' -AMF_CONF[@REGION_ID@]='128' -AMF_CONF[@AMF_SET_ID@]='1' - -AMF_CONF[@SERVED_GUAMI_MCC_0@]='208' -AMF_CONF[@SERVED_GUAMI_MNC_0@]='95' -AMF_CONF[@SERVED_GUAMI_REGION_ID_0@]='128' -AMF_CONF[@SERVED_GUAMI_AMF_SET_ID_0@]='1' -AMF_CONF[@SERVED_GUAMI_MCC_1@]='460' -AMF_CONF[@SERVED_GUAMI_MNC_1@]='11' -AMF_CONF[@SERVED_GUAMI_REGION_ID_1@]='10' -AMF_CONF[@SERVED_GUAMI_AMF_SET_ID_1@]='1' - -AMF_CONF[@PLMN_SUPPORT_MCC@]='208' -AMF_CONF[@PLMN_SUPPORT_MNC@]='95' -AMF_CONF[@PLMN_SUPPORT_TAC@]='0xa000' -AMF_CONF[@SST_0@]='222' -AMF_CONF[@SD_0@]='123' -AMF_CONF[@SST_1@]='1' -AMF_CONF[@SD_1@]='12' - -AMF_CONF[@AMF_INTERFACE_NAME_FOR_NGAP@]='CI_NGAP_IF_NAME' -AMF_CONF[@AMF_INTERFACE_NAME_FOR_N11@]='CI_N11_IF_NAME' - -AMF_CONF[@SMF_INSTANCE_ID_0@]='1' -AMF_CONF[@SMF_IPV4_ADDR_0@]='CI_SMF0_IP_ADDRESS' -AMF_CONF[@SMF_HTTP_VERSION_0@]='v1' -AMF_CONF[@SMF_INSTANCE_ID_1@]='2' -AMF_CONF[@SMF_IPV4_ADDR_1@]='CI_SMF1_IP_ADDRESS' -AMF_CONF[@SMF_HTTP_VERSION_1@]='v1' - - -AMF_CONF[@MYSQL_SERVER@]='CI_MYSQL_IP_ADDRESS' -AMF_CONF[@MYSQL_USER@]='root' -AMF_CONF[@MYSQL_PASS@]='linux' -AMF_CONF[@MYSQL_DB@]='oai_db' -AMF_CONF[@OPERATOR_KEY@]='63bfa50ee6523365ff14c1f45f88737d' - -for K in "${!AMF_CONF[@]}"; do - egrep -lRZ "$K" $PREFIX/amf.conf | xargs -0 -l sed -i -e 
"s|$K|${AMF_CONF[$K]}|g" - ret=$?;[[ ret -ne 0 ]] && echo "Tried to replace $K with ${AMF_CONF[$K]}" -done - -echo "AMF Configuration Successful" diff --git a/ci-scripts/temp/generate_smf_conf.sh b/ci-scripts/temp/generate_smf_conf.sh deleted file mode 100755 index 0c3fb7e52e4fadeed862303dfc5b14b9867e25ae..0000000000000000000000000000000000000000 --- a/ci-scripts/temp/generate_smf_conf.sh +++ /dev/null @@ -1,37 +0,0 @@ -# prompt has been removed for easier Ctrl+C Ctrl+V -# please update the following information according to your configuration - -INSTANCE=1 -PREFIX='/openair-smf/etc' - -declare -A SMF_CONF - -SMF_CONF[@INSTANCE@]=$INSTANCE -SMF_CONF[@PID_DIRECTORY@]='/var/run' - -SMF_CONF[@SMF_INTERFACE_NAME_FOR_N4@]='CI_N4_IF_NAME' -SMF_CONF[@SMF_INTERFACE_NAME_FOR_SBI@]='CI_SBI_IF_NAME' - -SMF_CONF[@SMF_INTERFACE_PORT_FOR_SBI@]='80' -SMF_CONF[@SMF_INTERFACE_HTTP2_PORT_FOR_SBI@]='9090' -SMF_CONF[@SMF_API_VERSION@]='v1' - -SMF_CONF[@UDM_IPV4_ADDRESS@]='127.0.0.1' -SMF_CONF[@UDM_PORT@]='80' -SMF_CONF[@UDM_API_VERSION@]='v2' - -SMF_CONF[@AMF_IPV4_ADDRESS@]='CI_AMF_IP_ADDR' -SMF_CONF[@AMF_PORT@]='80' -SMF_CONF[@AMF_API_VERSION@]='v1' - -SMF_CONF[@UPF_IPV4_ADDRESS@]='CI_UPF_IP_ADDR' - -SMF_CONF[@DEFAULT_DNS_IPV4_ADDRESS@]='8.8.8.8' -SMF_CONF[@DEFAULT_DNS_SEC_IPV4_ADDRESS@]='4.4.4.4' - -for K in "${!SMF_CONF[@]}"; do - egrep -lRZ "$K" $PREFIX/smf.conf | xargs -0 -l sed -i -e "s|$K|${SMF_CONF[$K]}|g" - ret=$?;[[ ret -ne 0 ]] && echo "Tried to replace $K with ${SMF_CONF[$K]}" -done - -echo "SMF Configuration Successful" diff --git a/ci-scripts/toCheckDSTesterResult.py b/ci-scripts/toCheckDSTesterResult.py deleted file mode 100644 index 30722bdad9db6b699a61e01a53db60d8e054a623..0000000000000000000000000000000000000000 --- a/ci-scripts/toCheckDSTesterResult.py +++ /dev/null @@ -1,91 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. -# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. 
-# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- - -import re -import sys -import subprocess -import yaml -import os -import argparse - -def main() -> None: - """Provides the status of the test and copies the results""" - args = _parse_args() - - if args.type == 'mini': - path = '/RESULTS-MINI/dsTester_Summary_mini.txt' - folder = 'RESULTS-MINI/' - elif args.type == 'basic': - path = '/RESULTS-BASIC/dsTester_Summary_basic.txt' - folder = 'RESULTS-BASIC/' - elif args.type == 'slice': - path = '/RESULTS-SLICE/dsTester_Summary_slice.txt' - folder = 'RESULTS-SLICE/' - - locexist = False - cwd = os.getcwd() - try: - with open(cwd + path) as f: - for line in f: - if re.search('Result file is available here', str(line)): - result = re.search('(?:\/.+?\/)(.+?)(?:\/.+)', str(line)) - if result: - result1 = re.search('^(.*/)([^/]*)$', str(result.group(0))) - filename = re.search('[^/]*$', str(result1.group(0))) - subprocess.check_output(f'cp -r {result1.group(1)}* {folder}', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - locexist = True - except IOError: - sys.exit(f"File not accessible to check DSTester Summary: {path}") - - if locexist: - try: - with open(cwd + f'/{folder}{filename.group(0)}') as f: - data = yaml.full_load(f) - if data["final-result"] == 'fail': - sys.exit('DS Tester FrameWork final result FAILED') - except IOError: - sys.exit(f'File not accessible to check DSTester result: {folder}{filename.group(0)}') - else: - sys.exit('Result path is not Available on Console, Something wrong with DSTester') - - -def _parse_args() -> argparse.Namespace: - """Parse the command line args - - Returns: - argparse.Namespace: the created parser - """ - parser = argparse.ArgumentParser(description='Check the status of 5GCN test with DSTester') - - # Type - parser.add_argument( - '--type', - action='store', - required=True, - choices=['mini', 'basic', 'slice'], - help='Type of function', - ) - return parser.parse_args() - - -if __name__ == '__main__': - main() diff --git a/ci-scripts/toCheckDSTesterResult1.py b/ci-scripts/toCheckDSTesterResult1.py deleted file mode 100644 index b2e5cb37a9a75e996523a421aa7ad0ea644247e2..0000000000000000000000000000000000000000 --- a/ci-scripts/toCheckDSTesterResult1.py +++ /dev/null @@ -1,51 +0,0 @@ -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the OAI Public License, Version 1.1 (the "License"); you may not use this file -# * except in compliance with the License. -# * You may obtain a copy of the License at -# * -# * http://www.openairinterface.org/?page_id=698 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. 
-# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -#--------------------------------------------------------------------- - -import re -import sys -import subprocess -import yaml -import os - -locexist = False -cwd = os.getcwd() -try: - with open(cwd + '/DS-TEST-RESULTS/dsTester_Summary.txt') as f: - for line in f: - if re.search('Result file is available here', str(line)): - result = re.search('(?:\/.+?\/)(.+?)(?:\/.+)', str(line)) - if result: - result1 = re.search('^(.*/)([^/]*)$', str(result.group(0))) - filename = re.search('[^/]*$', str(result1.group(0))) - subprocess.check_output(f'cp -r {result1.group(1)}* DS-TEST-RESULTS/', stderr=subprocess.STDOUT, shell=True, universal_newlines=True) - locexist = True -except IOError: - sys.exit("File not accessible to check DSTester Summary: DS-TEST-RESULTS/dsTester_Summary.txt") - -if locexist: - try: - with open(cwd + f'/DS-TEST-RESULTS/{filename.group(0)}') as f: - data = yaml.full_load(f) - if data["final-result"] == 'fail': - sys.exit('DsTester final result FAILED') - except IOError: - sys.exit(f'File not accessible to check DSTester result: DS-TEST-RESULTS/{filename.group(0)}') diff --git a/docs/CONFIGURE_CONTAINERS.md b/docs/CONFIGURE_CONTAINERS.md index 498cc414dc53b217ab81b004f07a22e7928de101..9d6496f7127883c40037a4cebf9bacc2be03b5ac 100644 --- a/docs/CONFIGURE_CONTAINERS.md +++ b/docs/CONFIGURE_CONTAINERS.md @@ -22,7 +22,7 @@ # 1. Configure the containers # -- **Core Network Configuration**: The [docker-compose-mini-nrf](../docker-compose/docker-compose-mini-nrf.yaml) file has configuration parameters for all the core network components. The file is pre-configured with parameters related to an [example scenario](./DEPLOY_SA5G_MINI_DEPLOYMENT.md). The table contains the location of the configuration files. These files contain allowed configurable parameters. **Keep checking this file up to date as it is possible that we will add new parameters for new features.** +- **Core Network Configuration**: The [docker-compose-mini-nrf](../docker-compose/docker-compose-mini-nrf.yaml) file has configuration parameters for all the core network components. The file is pre-configured with parameters related to an [example scenario](./DEPLOY_SA5G_MINI_WITH_GNBSIM.md). The table contains the location of the configuration files. These files contain allowed configurable parameters. **Keep checking this file up to date as it is possible that we will add new parameters for new features.** | File Name | Repository | Location | |:----------- |:-------------------------------------------- |:--------------- | @@ -119,5 +119,5 @@ Please don't deploy yet! -- If you want to deploy a `minimalist` OAI 5GCN, checkout this [minimalist-deployment tutorial](./DEPLOY_SA5G_MINI_DEPLOYMENT.md). +- If you want to deploy a `minimalist` OAI 5GCN, checkout this [minimalist-deployment tutorial](./DEPLOY_SA5G_MINI_WITH_GNBSIM.md). - If you want to deploy a `basic` OAI 5GCN, checkout this [basic-deployment tutorial](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md). diff --git a/docs/DEBUG_5G_CORE.md b/docs/DEBUG_5G_CORE.md index f9afc95e3f81bb17e1771f14c0bd7245d7c58801..1ff242bd670e0ac3b46930c14e5da1c0e7ad2192 100644 --- a/docs/DEBUG_5G_CORE.md +++ b/docs/DEBUG_5G_CORE.md @@ -14,7 +14,7 @@ Currently there is no special tool for debugging the problems of core network components. 
Though there are certain methods which we use in our team to diagnose an issue. -This page content expects you to read [deployment pre-requisites](./DEPLOY_PRE_REQUISITES.md) and try to deploy a [mini](./DEPLOY_SA5G_MINI_DEPLOYMENT.md) or [basic](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md) normal deployment. +This page content expects you to read [deployment pre-requisites](./DEPLOY_PRE_REQUISITES.md) and try to deploy a [mini](./DEPLOY_SA5G_MINI_WITH_GNBSIM.md) or [basic](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md) normal deployment. **TABLE OF CONTENTS** @@ -169,7 +169,7 @@ The healthchecks can be directly used from [here](../docker-compose/healthscript # 2.2.4 Creating docker-compose -To run this docker-compose the network `demo-oai-public-net` should be created. To know how to create the network [follow](./DEPLOY_SA5G_MINI_DEPLOYMENT.md#3-configuring-host-machines) +To run this docker-compose the network `demo-oai-public-net` should be created. ``` version: '3.8' @@ -288,8 +288,7 @@ Network components configuration is present in `~/oai-docker-compose/confs/` the 1. Building the images in debug mode will provide more information about UE attach-detach process. 2. Capture packets to understand message flow between the components and encapsulation-decapsulation. -3. The captured packets can be compared with the end-to-end pcaps which are available in this [tutorial](./DEPLOY_SA5G_MINI_DEPLOYMENT.md) -4. Check the UE subscription information is available in the Mysql database and the OPC is correctly configured in AMF. +3. Check the UE subscription information is available in the Mysql database and the OPC is correctly configured in AMF. # 4. How to report an issue? diff --git a/docs/DEPLOY_HOME.md b/docs/DEPLOY_HOME.md index 1fa7edc6e04529ac3dc1fd9d72d03229b2f83536..d35726e630f046c6efbe4e8c591ba70cfbc76282 100644 --- a/docs/DEPLOY_HOME.md +++ b/docs/DEPLOY_HOME.md @@ -14,7 +14,6 @@ Welcome to the tutorial home page of the OAI 5g Core project. Here you can find - [Build the container images](./BUILD_IMAGES.md) - [Configuring the Containers](./CONFIGURE_CONTAINERS.md) - 5G Core Network Deployment - - [Using Docker-Compose, perform a `minimalist` deployment](./DEPLOY_SA5G_MINI_DEPLOYMENT.md) - [Using Docker-Compose, perform a `basic` deployment](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md) - [Using Docker-Compose, perform a `basic-vpp` deployment with `VPP` implementation of UPF](./DEPLOY_SA5G_WITH_VPP_UPF.md) - [Using Docker-Compose, perform a `basic` deployment with `SD-Fabric` implementation of UPF](https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-upf-sdfabric/-/wikis/Deployment-using-Docker) @@ -23,12 +22,14 @@ Welcome to the tutorial home page of the OAI 5g Core project. 
Here you can find - [Using Docker-Compose, doing network slicing](./DEPLOY_SA5G_SLICING.md) - 5G Core Network Deployment and Testing with Ran Emulators - [Using Docker-Compose, perform a `basic` deployment and test with `OAI RF simulator`](https://gitlab.eurecom.fr/oai/openairinterface5g/-/tree/develop/ci-scripts/yaml_files/5g_rfsimulator) - - [Using Docker-Compose, perform a `minimalist` deployment and test with `gnbsim`](./DEPLOY_SA5G_WITH_GNBSIM.md) + - [Using Docker-Compose, perform a `minimalist` deployment and test with `gnbsim`](./DEPLOY_SA5G_MINI_WITH_GNBSIM.md) - [Using Docker-Compose, perform a `basic` deployment and test with `UERANSIM`](./DEPLOY_SA5G_WITH_UERANSIM.md) - [Using Docker-Compose, perform a `basic` deployment and test with `My5g-RANTester`](./DEPLOY_SA5G_WITH_My5g-RANTester.md) - [Using Docker-Compose, perform a `basic` deployment and test with `omec-gnbsim`](./DEPLOY_SA5G_WITH_OMEC_GNBSIM.md) - [Using Docker-Compose, when testing with Commercial UE, troubleshoot traffic issues](./TROUBLESHOOT_COTS_UE_TRAFFIC.md) - [Using Docker-Compose, perform a `basic` UL/CL deployment and test with `gnbsim`](./DEPLOY_SA5G_ULCL.md) +- Connecting a real RAN to OAI 5G Core Network + - [Network Considerations](./NETWORK_CONSIDERATIONS.md) - The Developers Corner - [How to Deploy Developers Core Network and Basic Debugging](./DEBUG_5G_CORE.md) - [Advance Deployment of OAI 5G Core](./ADVANCE_DEPLOYMENT.md) diff --git a/docs/DEPLOY_SA5G_BASIC_STATIC_UE_IP.md b/docs/DEPLOY_SA5G_BASIC_STATIC_UE_IP.md index d0c605c15b44ec2d0e7e725a52f957f20dc71502..28c97c98e09197ddfefa2533dac0d664209b9017 100644 --- a/docs/DEPLOY_SA5G_BASIC_STATIC_UE_IP.md +++ b/docs/DEPLOY_SA5G_BASIC_STATIC_UE_IP.md @@ -102,7 +102,7 @@ For now these two entries are present in the database file ## 3. Deploying OAI 5g Core Network -In the previous tutorial we explain how to deploy the core network using our [python deployer](../docker-compose/core-network.py). Here we will only provide quick commands needed to deploy the core network, to learn how to use the python deployer please follow [this page](./DEPLOY_SA5G_MINI_DEPLOYMENT.md). +In the previous tutorial we explain how to deploy the core network using our [python deployer](../docker-compose/core-network.py). Here we will only provide quick commands needed to deploy the core network, to learn how to use the python deployer please follow [this page](./DEPLOY_SA5G_MINI_WITH_GNBSIM.md). - Start the core network components, check which scenario you are using with nrf or without nrf diff --git a/docs/DEPLOY_SA5G_MINI_DEPLOYMENT.md b/docs/DEPLOY_SA5G_MINI_DEPLOYMENT.md deleted file mode 100644 index 76aeeda64f631ed0be2068f70701b5d4e76e08f0..0000000000000000000000000000000000000000 --- a/docs/DEPLOY_SA5G_MINI_DEPLOYMENT.md +++ /dev/null @@ -1,496 +0,0 @@ -<table style="border-collapse: collapse; border: none;"> - <tr style="border-collapse: collapse; border: none;"> - <td style="border-collapse: collapse; border: none;"> - <a href="http://www.openairinterface.org/"> - <img src="./images/oai_final_logo.png" alt="" border=3 height=50 width=150> - </img> - </a> - </td> - <td style="border-collapse: collapse; border: none; vertical-align: center;"> - <b><font size = "5">OpenAirInterface 5G Core Network Minimalist Deployment using Docker-Compose</font></b> - </td> - </tr> -</table> - - - - -**Caution:** This is an old tutorial so artifacts like logs and pcap will not be from the new version of the core network but the docker-compose files are up to date. 
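The `DEPLOY_SA5G_BASIC_STATIC_UE_IP.md` hunk above keeps pointing readers to the python deployer (`docker-compose/core-network.py`) for the "quick commands" instead of raw docker-compose invocations. A minimal sketch of such an invocation is shown below; the flag names and scenario numbering are assumptions and should be confirmed against the script's own `--help`.

```bash
# Hypothetical quick-start with the python deployer; flag names/values are
# assumptions -- confirm them with `python3 core-network.py --help`.
cd docker-compose
python3 core-network.py --type start-mini --scenario 1   # minimalist flavour, with NRF
python3 core-network.py --type stop-mini  --scenario 1   # tear the same deployment down
```

The two scenario values are assumed to map onto the NRF / no-NRF minimalist flavours described in the deleted tutorial below, if the deployer keeps that numbering.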
- -**OVERVIEW** - -This tutorial will help in understanding how to deploy a `minimalist` OAI core network using docker-compose. - -Please follow the tutorial step by step to create a stable working testbed. - -**Reading time: ~20mins** - -**Tutorial replication time: ~1hr** - -**TABLE OF CONTENTS** - -1. [Minimalist Deployment Flavours](#1-minimalist-deployment-flavours) -2. [Pre-requisites](#2-pre-requisites) -3. [Network Function Container Images](#2-network-function-container-images) -4. [Configuring Host Machines](#4-configuring-host-machines) -5. [Configuring OAI 5G Core Network Functions](#5-configuring-the-oai-5g-core-network-functions) -6. [Configuring dsTest Scenario](#6-configuring-dstester-scenario) -7. [Deploying OAI 5G Core Network](#7-deploying-oai-5g-core-network) -8. [Executing dsTest Scenario](#8-executing-the-dstest-scenario) -9. [Analysing Scenario Results](#9-analysing-the-scenario-results) -10. [Demo Video](#10-demo-video) -11. [Notes](#11-notes) - - -## 1. Minimalist Deployment Flavours ## - -The `Minimalist` functional 5g core network can be deployed into 2 scenarios: - - - Scenario I: AMF, SMF, UPF (SPGWU), NRF, MYSQL - - Scenario II: AMF, SMF, UPF (SPGWU), MYSQL - -## 2. Pre-requisites ## - -The container images are built using the command `docker build` on a Ubuntu 18.04 host machine. The base image for all the containers is Ubuntu 18.04. - -The required software and their respective versions are listed below. To replicate the testbed use these versions. - - -| Software | Version | -|:-------------------------- |:------------------------------- | -| docker engine | 19.03.6, build 369ce74a3c | -| docker-compose | 1.27.4, build 40524192 | -| Host operating system | Ubuntu 18.04.4 LTS | -| Container operating system | Ubuntu 18.04 | -| tshark | 3.4.4 (Git commit c33f6306cbb2) | -| wireshark | 3.4.4 (Git commit c33f6306cbb2) | - -### 2.1. Wireshark ### - -The new version of `wireshark` may not be available in the ubuntu repository. - -- So it is better to build it from source. - -You may also use the developer PPA: - -```bash -sudo add-apt-repository ppa:wireshark-dev/stable -sudo apt update -sudo apt install wireshark - -wireshark --version -Wireshark 3.4.7 (Git v3.4.7 packaged as 3.4.7-1~ubuntu18.04.0+wiresharkdevstable1) -``` - -### 2.2. Networking considerations ### - -Most of the times the `docker-compose-host` machine is not configured with packet forwarding. It can be done using the command below (if you have already done it in any other section then don't repeat). - -**This is the most important step towards end-to-end connectivity.** - -```bash -(docker-compose-host)$ sudo sysctl net.ipv4.conf.all.forwarding=1 -(docker-compose-host)$ sudo iptables -P FORWARD ACCEPT -``` - -To know how to configure the machine with the above requirements vist [pre-requisites](./DEPLOY_PRE_REQUISITES.md) page. - -## 3. Network Function Container Images ## - -- In this demo the network function branch and tags which were used are listed below, follow the [Retrieving images](./RETRIEVE_OFFICIAL_IMAGES.md) or [Building images](./BUILD_IMAGES.md) to build images with the tags below. 
- -| CNF Name | Branch Name | Tag | Ubuntu 18.04 | RHEL8 (UBI8) | -| ----------- | ----------- | -------- | ------------ | ----------------| -| AMF | `master` | `v1.5.0` | X | X | -| SMF | `master` | `v1.5.0` | X | X | -| NRF | `master` | `v1.5.0` | X | X | -| SPGW-U-TINY | `master` | `v1.5.0` | X | X | - -- In case readers are interested in using different branches than develop or releases (example v1.5.0) **they have to build images from scratch, they can't use the docker-hub images**. - -## 4. Configuring Host Machines ## - -All the network functions are connected using `demo-oai` bridge. - -There are two ways to create this bridge, either manually or automatically using docker-compose. - -* The manual version will allow packet capturing while network functions are getting deployed. So the initial tested setup packets can be captured for debugging purposes or checking if network functions registered properly to NRF. -* The second option of automatic deployment is good when initial packet capture is not important. - -**NOTE** This tutorial needs the bridge to be created manually to analyse NRF packet exchange. - -### 4.1 Creating the bridge manually ### - -Since this is not the `default` behavior, you **SHALL** edit the docker-compose file. - -- The bottom section of [docker-compose file](../docker-compose/docker-compose-mini-nrf.yaml) SHALL look like this: - -``` - networks: - public_net: - external: - name: demo-oai-public-net - # public_net: - # driver: bridge - # name: demo-oai-public-net - # ipam: - # config: - # - subnet: 192.168.70.128/26 - # driver_opts: - # com.docker.network.bridge.name: "demo-oai" -``` - -- The `docker-compose-host` machine needs to be configured with `demo-oai` bridge before deploying core network components to capture initial message exchange between smf<-->nrf<-->upf. - - ```bash - (docker-compose-host)$ docker network create \ - --driver=bridge \ - --subnet=192.168.70.128/26 \ - -o "com.docker.network.bridge.name"="demo-oai" \ - demo-oai-public-net - 455631b3749ccd6f10a366cd1c49d5a66cf976d176884252d5d88a1e54049bc5 - (docker-compose-host)$ ifconfig demo-oai - demo-oai: flags=4099<UP,BROADCAST,MULTICAST> mtu 1500 - inet 192.168.70.129 netmask 255.255.255.192 broadcast 192.168.70.191 - RX packets 0 bytes 0 (0.0 B) - RX errors 0 dropped 0 overruns 0 frame 0 - TX packets 0 bytes 0 (0.0 B) - TX errors 0 dropped 0 overruns 0 carrier 0 collisions 0 - (docker-compose-host)$ docker network ls - NETWORK ID NAME DRIVER SCOPE - d2d34e05bb2d bridge bridge local - 455631b3749c demo-oai-public-net bridge local - ``` - -### 4.2 Creating the bridge automatically ### - -- Though the bridge can be automatically created using docker-compose file if there is no need to capture initial packets. - -This is the `default` version in the [docker-compose-mini-nrf.yaml](../docker-compose/docker-compose-mini-nrf.yaml) or `docker-compose-mini-nonrf.yaml`. - -The bottom section SHALL look like this: - - ``` - networks: - # public_net: - # external: - # name: demo-oai-public-net - public_net: - driver: bridge - name: demo-oai-public-net - ipam: - config: - - subnet: 192.168.70.128/26 - driver_opts: - com.docker.network.bridge.name: "demo-oai" - ``` - -### 4.3 In case you forgot. True for manual or automatic network creation. 
### - -- If the `docker-compose-host` machine is not configured with packet forwarding then it can be done using the command below (**important step**), - - ```bash - (docker-compose-host)$ sudo sysctl net.ipv4.conf.all.forwarding=1 - (docker-compose-host)$ sudo iptables -P FORWARD ACCEPT - ``` - -- The `dsTest-host` needs to be configured with a route to reach `docker-compose-host`. Assuming the `dsTest-host` physical interface which is connected with `docker-compose-host` is NIC1 and the ip-address of this interface is IP_ADDR_NIC1 then, - - ```bash - (dsTest-host)$ sudo ip route add 192.168.70.128/26 \ - via IP_ADDR_NIC1\ - dev NIC1_NAME - ``` - -- To verify, ping the ip-address of the `docker-compose-host` interface connected to demo-oai bridge, if possible also ping amf from the dsTest-host machine. - - ```bash - (dsTest-host)$ ping 192.168.70.129 - PING 192.168.70.129 (192.168.70.129) 56(84) bytes of data. - 64 bytes from 192.168.70.129: icmp_seq=1 ttl=64 time=0.260 ms - 64 bytes from 192.168.70.129: icmp_seq=2 ttl=64 time=0.147 ms - 64 bytes from 192.168.70.129: icmp_seq=3 ttl=64 time=0.187 ms - 64 bytes from 192.168.70.129: icmp_seq=4 ttl=64 time=0.187 ms - 64 bytes from 192.168.70.129: icmp_seq=5 ttl=64 time=0.181 ms - ^C - --- 192.168.70.129 ping statistics --- - 5 packets transmitted, 5 received, 0% packet loss, time 108ms - rtt min/avg/max/mdev = 0.147/0.192/0.260/0.038 ms - ``` - -## 5. Configuring the OAI-5G Core Network Functions ## - -### 5.1. Core Network Configuration ### - -The docker-compose file has configuration parameters for each core network component. The file is pre-configured with parameters related to this scenario. The following table contains the location of the configuration files. These files contain the allowed configurable parameters. **Keep this file up to date as it is possible that we will add new parameters for new features.** - -| File Name | Repository | Location | -| ----------- | -------------------------------------------- | --------------- | -| amf.conf | (Gitlab) cn5g/oai-cn5g-amf | [etc/amf.conf](https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-amf/-/blob/master/etc/amf.conf) | -| smf.conf | (Gitlab) cn5g/oai-cn5g-smf | [etc/smf.conf](https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-smf/-/blob/master/etc/smf.conf) | -| nrf.conf | (Gilab) cn5g/oai-cn5g-nrf | [etc/nrf.conf](https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-nrf/-/blob/master/etc/nrf.conf) | -| spgw_u.conf | (Github) OPENAIRINTERFACE/openair-spgwu-tiny | [etc/spgw_u.conf](https://github.com/OPENAIRINTERFACE/openair-spgwu-tiny/blob/master/etc/spgw_u.conf) | - -### 5.2. User Subscription Profile ### - -The dsTest UE which will request a PDU session will have this user profile. Verify that this entry is present in the oai_db1.sql file located in [docker-compose/database/oai-db1.sql](../docker-compose/database/oai_db1.sql). 
- - ``` - IMSI - 208950000000031 - IMEI - 55000000000001 - Secret Key (K) - 0x0C0A34601D4F07677303652C0462535B - OPc - 0x63bfa50ee6523365ff14c1f45f88737d - ``` - -- **Optional**: In case the user subscription entry is missing from the oai_db1.sql file, then it can be added using the following commands, - - ```bash - #Login to mysql container once the container is running - (docker-compose-host)$ docker exec -it mysql /bin/bash - (mysql-container)$ mysql -uroot -plinux -D oai_db - mysql> INSERT INTO users VALUES - ('208950000000031','380561234567','55000000000001',NULL,'PURGED',50,40000000,100000000,47,0000000000,1,0x0C0A34601D4F07677303652C0462535B,0,0,0x40,'ebd07771ace8677a',0x63bfa50ee6523365ff14c1f45f88737d); - ``` - -## 6. Configuring DsTester Scenario ## - -- **User Subscription Profile**: The user profile used for the dsTest scenario is below. Verify that this entry is present in the oai_db1.sql file located in docker-compose/database/oai_db1.sql. - - ``` - IMSI - 208950000000031 - IMEI - 55000000000001 - Secret Key (K) - 0x0C0A34601D4F07677303652C0462535B - OPc - 0x63bfa50ee6523365ff14c1f45f88737d - ``` - -- **gNB Parameters for dsTest**: - - ``` - TAC - 0xa000 - MCC - 208 - MNC - 95 - NSSAI SST - 222 - NSSAI SD - 123 - ``` - -- [The SmartEvents State Machine](https://www.developingsolutions.com/Help/Topics/SmartFlow-SmartEvents-State-Machines.htm) used for this dsTest scenario is below, the number on each arrow between different states depicts transition interval in milliseconds. - - - - - -## 7. Deploying OAI 5g Core Network ## - -- The core network is deployed using a [python script](../docker-compose/core-network.py) which is a wrapper around the `docker-compose` and `docker` commands. -- The script informs the user when the core-network is correctly configured by checking the health status of the containers and connectivity between different core network components. -- In case there is a problem using the script, then use docker-compose manually, read the [notes section](#11-notes) -- If the script is executed without any arguments then the help menu is shown - - ```bash - docker-compose-host $: pwd - /home/<docker-compose-host>/oai/oai-cn-fed/docker-compose - docker-compose-host $: python3 core-network.py --help - - usage: core-network.py [-h] --type {start-mini,start-basic,start-basic-vpp,stop-mini,stop-basic,stop-basic-vpp} [--scenario {1,2}] [--capture CAPTURE] - - OAI 5G CORE NETWORK DEPLOY - - optional arguments: - -h, --help show this help message and exit - --type {start-mini,start-basic,start-basic-vpp,stop-mini,stop-basic,stop-basic-vpp}, -t {start-mini,start-basic,start-basic-vpp,stop-mini,stop-basic,stop-basic-vpp} - Functional type of 5g core network ("start-mini"|"start-basic"|"start-basic-vpp"|"stop-mini"|"stop-basic"|"stop-basic-vpp") - --scenario {1,2}, -s {1,2} - Scenario with NRF ("1") and without NRF ("2") - --capture CAPTURE, -c CAPTURE - Add an automatic PCAP capture on docker networks to CAPTURE file - - example: - python3 core-network.py --type start-mini - python3 core-network.py --type start-basic - python3 core-network.py --type start-basic-vpp - python3 core-network.py --type stop-mini - python3 core-network.py --type start-mini --scenario 2 - python3 core-network.py --type start-basic --scenario 2 - ``` -- Before executing the script, it is better to start capturing packets to see the message flow between smf <--> nrf <--> upf. The packets will be captured on **demo-oai** bridge which should be configured on the `docker-compose-host` machine. 
- - ```bash - (docker-compose-host)$ sudo tshark -i demo-oai \ - -f "not arp and not port 53 and not host archive.ubuntu.com and not host security.ubuntu.com" \ - -w /tmp/5gcn-mini-deployment-nrf.pcap - ``` - -- Explanation on the capture filter: - * `not arp` : Not capturing ARP traffic - * `not port 53` : Not capturing DNS traffic - * `not host archive.ubuntu.com and not host security.ubuntu.com` : Not capturing traffic from `oai-ext-dn` container when installing tools -- Starting the core network components, - - ```bash - (docker-compose-host)$ $ python3 ./core-network.py --type start-mini --fqdn no --scenario 1 - [2021-09-14 16:43:45,980] root:DEBUG: Starting 5gcn components... Please wait.... - Creating oai-nrf ... done - Creating mysql ... done - Creating oai-amf ... done - Creating oai-smf ... done - Creating oai-spgwu ... done - Creating oai-ext-dn ... done - - [2021-09-14 16:44:10,098] root:DEBUG: OAI 5G Core network started, checking the health status of the containers... takes few secs.... - [2021-09-14 16:44:47,025] root:DEBUG: All components are healthy, please see below for more details.... - Name Command State Ports - ----------------------------------------------------------------------------------------- - mysql docker-entrypoint.sh mysqld Up (healthy) 3306/tcp, 33060/tcp - oai-amf /bin/bash /openair-amf/bin ... Up (healthy) 38412/sctp, 80/tcp, 9090/tcp - oai-ext-dn /bin/bash -c apt update; ... Up - oai-nrf /bin/bash /openair-nrf/bin ... Up (healthy) 80/tcp, 9090/tcp - oai-smf /bin/bash /openair-smf/bin ... Up (healthy) 80/tcp, 8805/udp, 9090/tcp - oai-spgwu /openair-spgwu-tiny/bin/en ... Up (healthy) 2152/udp, 8805/udp - [2021-09-14 16:44:47,025] root:DEBUG: Checking if the containers are configured.... - [2021-09-14 16:44:47,025] root:DEBUG: Checking if SMF and UPF registered with nrf core network.... - [2021-09-14 16:44:47,059] root:DEBUG: For example: oai-smf Registration with oai-nrf can be checked on this url /nnrf-nfm/v1/nf-instances?nf-type="SMF" {"_links":{"item":[{"href":"192.168.70.133"}],"self":""}}.... - [2021-09-14 16:44:47,059] root:DEBUG: SMF and UPF are registered to NRF.... - [2021-09-14 16:44:47,059] root:DEBUG: Checking if SMF is able to connect with UPF.... - [2021-09-14 16:44:47,176] root:DEBUG: UPF receiving heathbeats from SMF.... - [2021-09-14 16:44:47,176] root:DEBUG: OAI 5G Core network is configured and healthy.... - ``` - -## 8. Executing the dsTest Scenario ## - -- **Scenario Execution**: On the dsTest host run the scenario either using the dsClient GUI or command line. Below are the commands to run it using the command line. - - ```bash - (dsTest-host)$ dsClient -d 127.0.0.1 -c "source dsTestScenario.xml" - ``` -- **Verify PDN session establishment**: To check if a PDN session is properly established, there is an extra external data network container only for this demo purpose. The dsTest UE can be reached using this container to validate the PDN session establishment. To understand the packet flow, read the next analysis section. In our settings the UE network is 12.1.1.0/24. The configuration can be seen in smf.conf and spgw_u.conf. The allocated IP address to dsTest UE can be seen in smf logs. Generally, if there is a single UE then the allocated ip address will be 12.1.1.2. 
- - ```bash - (docker-compose-host)$ docker exec -it oai-ext-dn ping 12.1.1.2 - 64 bytes from 12.1.1.2: icmp_seq=3 ttl=63 time=0.565 ms - 64 bytes from 12.1.1.2: icmp_seq=4 ttl=63 time=0.629 ms - 64 bytes from 12.1.1.2: icmp_seq=5 ttl=63 time=0.542 ms - 64 bytes from 12.1.1.2: icmp_seq=6 ttl=63 time=0.559 ms - ^c - ``` - -- **Stop PCAP collection**: Stop the wireshark or tshark process on the docker-compose-host. - -- **Undeploy the core network**: Before undeploying collect all the logs from each component for analysis. - - ```bash - (docker-compose-host)$ docker logs oai-amf > amf.log - (docker-compose-host)$ docker logs oai-smf > smf.log - (docker-compose-host)$ docker logs oai-nrf > nrf.log - (docker-compose-host)$ docker logs oai-spgwu > spgwu.log - (docker-compose-host)$ python3 ./core-network.py --type stop-mini - [2021-09-14 16:46:45,137] root:DEBUG: UnDeploying OAI 5G core components.... - Stopping oai-ext-dn ... done - Stopping oai-spgwu ... done - Stopping oai-smf ... done - Stopping oai-amf ... done - Stopping mysql ... done - Stopping oai-nrf ... done - Removing oai-ext-dn ... done - Removing oai-spgwu ... done - Removing oai-smf ... done - Removing oai-amf ... done - Removing mysql ... done - Removing oai-nrf ... done - Network demo-oai-public-net is external, skipping - - [2021-09-14 16:47:44,070] root:DEBUG: OAI 5G core components are UnDeployed.... - ``` - -- If you chose the `manual` networking option, do not forget to remove the network as well: - - ```bash - (docker-compose-host)$ docker network rm demo-oai-public-net - demo-oai-public-net - ``` - -## 9. Analysing the Scenario Results ## - -This section is subdivided in two parts: - -- the first part for analysing the message exchange between core network components at the time of deployment. -- the second part for analysing the dsTest stimuli (ie gNB and UE connections). - -| Container | Ip-address | -| ------------- |:-------------- | -| mysql | 192.168.70.131 | -| oai-amf | 192.168.70.132 | -| oai-smf | 192.168.70.133 | -| oai-nrf | 192.168.70.130 | -| oai-spgwu | 192.168.70.134 | -| oai-ext-dn | 192.168.70.135 | -| Host Machine | 192.168.70.129 | -| dsTest gNB/UE | 192.168.18.184 | - -We are providing both scenarios (with and without `NRF`) as reference but we will only analyze the scenario with `NRF`. - -| PCAP/LOG files for Mini w/ NRF | PCAP/LOG files for Mini w/o NRF | -| ----------------------------------------------------------------------------------------- | ------------------------------- | -| [5gcn-mini-deployment-nrf.pcap](./results/dsTest/mini-nrf/5gcn-mini-deployment-nrf.pcap) | [5gcn-mini-deployment-no-nrf.pcap](./results/dsTest/mini-no-nrf/5gcn-mini-deployment-no-nrf.pcap) | -| [amf.log](./results/dsTest/mini-nrf/amf.log) | [amf.log](./results/dsTest/mini-no-nrf/amf.log) | -| [smf.log](./results/dsTest/mini-nrf/smf.log) | [smf.log](./results/dsTest/mini-no-nrf/smf.log) | -| [nrf.log](./results/dsTest/mini-nrf/nrf.log) | | -| [spgwu.log](./results/dsTest/mini-nrf/spgwu.log) | [spgwu.log](./results/dsTest/mini-no-nrf/spgwu.log) | - - -### 9.1. 
Analysing initial message exchange ### - -Using wireshark, open `5gcn-mini-deployment-nrf.pcap` and use the filter `http || pfcp` - -- SMF request to NRF for subscribing to UPF registration/de-registration events: Packet `37`, `POST` request -- SMF registration with NRF: Packet `47`, `PUT` request -- UPF(SPGWU) registration with NRF: Packet `60`, `PUT` request -- NRF notification to SMF for UPF registration: Packet `65`, `POST` request -- SMF <--> UPF PFCP Association Setup request and response: Packets `67`, `73` -- Message exchange between SMF, NRF and UPF can be seen in nrf.log but the name of the network function is replaced with a unique identifier (UUID). - - - - -### Analysing scenario execution ### - -Using wireshark, open `5gcn-mini-deployment-nrf.pcap` and use the filter `ngap || http || pfcp || gtp` - -- NG Setup request reponse: Packets `311`, `313` -- Initial UE Message registration request: Packet `315` -- Authentication and security processL: Packets `339-372` -- Registration complete: Packet `373` -- PDU session establishment request: Packet `396` -- AMF requesting NRF for SMF address: Packet `384` & `386` GET Request -- AMF <--> SMF PDU session request: Packet `394` & `397` POST Request -- SMF <--> UPF PFCP session establishment: Packet `396` & `402` -- Allocated UE IP-address can be seen in Packet `396`, `413`, `420` - - - -- UE PDU session release request: Packet `495` -- AMF <--> SMF PDU session release request: Packet `500` -- NGreset : Packet `552` - - - - -## 10. Demo Video ## - -- Here is the link to the [youtube video](https://www.youtube.com/watch?v=ENQiwl2EYl8) - -## 11. Notes ## - -- The `oai-ext-dn` container is optional and is only required if the user wants to ping the dsTest UE. In general this container is not required except for testing purposes. -- This tutorial can be taken as a reference to test the OAI 5G core with a COTS UE. The configuration file has to be changed according to the gNB and COTS UE information should be present in the mysql database. -- Generally, in a COTS UE two PDN sessions are created by default so configure the IMS in SMF properly. -- In case you want to deploy debuggers/developers core network environment with more logs please follow [this tutorial](./DEBUG_5G_CORE.md). -- It is not necessary to use [core-network.py](../docker-compose/core-network.py) Python script, it is possible to directly deploy using `docker-compose` command. - - ``` console - #To start the containers - docker-compose-host $: docker-compose -f <file-name> up -d - #To check their health status and wait till the time they are healthy, you ctrl + c to exit watch command - docker-compose-host $: watch docker-compose -f <file-name> ps -a - #To stop the containers with zero graceful period - docker-compose-host $: docker-compose -f <file-name> down -t 0 - ``` diff --git a/docs/DEPLOY_SA5G_WITH_GNBSIM.md b/docs/DEPLOY_SA5G_MINI_WITH_GNBSIM.md similarity index 96% rename from docs/DEPLOY_SA5G_WITH_GNBSIM.md rename to docs/DEPLOY_SA5G_MINI_WITH_GNBSIM.md index 0786518f5fdc082919f323d67f92584e71bc1fc8..32a19e3a316a2c3a6e0b0f809346a1205664943f 100644 --- a/docs/DEPLOY_SA5G_WITH_GNBSIM.md +++ b/docs/DEPLOY_SA5G_MINI_WITH_GNBSIM.md @@ -12,7 +12,7 @@ </tr> </table> - + **Reading time: ~ 20mins** @@ -44,9 +44,7 @@ Note: In case readers are interested in deploying debuggers/developers core netw <br/> -This tutorial is an extension of a previous tutorial: [testing a `minimalist` deployment using Docker-Compose](./DEPLOY_SA5G_MINI_DEPLOYMENT.md). 
In this previous tutorial, we saw the advanced testing tool dsTester, which is useful for validating even more complex scenarios. - -Moreover, there are various other opensource gnb/ue simulator tools that are available for SA5G testing. In this tutorial, we use an opensource simulator tool called `gnbsim`. With the help of the `gnbsim` tool, we can perform very basic SA5G tests by simulating one gnb and one ue. +In this tutorial, we use an opensource simulator tool called `gnbsim`. With the help of the `gnbsim` tool, we can perform very basic SA5G tests by simulating one gnb and one ue. **About gnbsim:** @@ -55,7 +53,7 @@ Gnbsim is a 5G SA gNB/UE (Rel. 16) simulator for testing 5G System. It is a 3rd Let's begin !! * Steps 1 to 5 are similar to the previous tutorial. Please follow these steps to deploy OAI 5G core network components. -* We deploy the gnbsim docker service on the same host as for core network, so there is no need to create an additional route as we did for dsTest-host. +* We deploy the gnbsim docker service on the same host as for core network, so there is no need to create an additional route as we did for gnb-host. * Before we proceed further, for end-to-end SA5G testing, make sure you have healthy docker services for OAI cn5g. ## 1. Pre-requisites @@ -94,8 +92,6 @@ For CI purposes, we are deploying with an automated PCAP capture on the docker n docker-compose-host $: python3 ./core-network.py --type start-mini --scenario 1 --capture /tmp/oai/mini-gnbsim/mini-gnbsim.pcap ``` -More details in [section 7 of the `minimalist` tutorial](./DEPLOY_SA5G_MINI_DEPLOYMENT.md#7-deploying-oai-5g-core-network). - ``` console oai-cn5g-fed/docker-compose$ docker ps -a CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES @@ -300,8 +296,6 @@ docker-compose-host $: docker logs gnbsim > /tmp/oai/mini-gnbsim/gnbsim.log 2>&1 | [nrf.log](./results/dsTest/logs/nrf.log) | | [spgwu.log](./results/dsTest/logs/spgwu.log) -* For detailed analysis of messages, please refer to the previous tutorial [testing with dsTester](./docs/DEPLOY_SA5G_WITH_DS_TESTER.md). - ## 9. Trying Some Advanced Stuff Here we try some scaling testing with gnbsim. There are additional IMSIs added into the database (208950000000031-208950000000040). Now we create a few more gnbsim instances (4 more for now). We use the same script to generate additional instances as follow - diff --git a/docs/DEPLOY_SA5G_SLICING.md b/docs/DEPLOY_SA5G_SLICING.md index 301463ad948353046b252f6a23dace10f6fce003..48f02b421971b7dea6d335e499545a24d32a85d6 100644 --- a/docs/DEPLOY_SA5G_SLICING.md +++ b/docs/DEPLOY_SA5G_SLICING.md @@ -59,7 +59,7 @@ In this tutorial we are going to explore the slicing feature of OAI 5G core netw * SMF selection based on S-NSSAI * NRF selection based on S-NSSAI (With help of NSSF) * AMF selection based on S-NSSAI (With help of NSSF - Next Release, March 2022) -* Single UE with multiple S-NSSAIs (With the help of commercial tool dsTest) +* Single UE with multiple S-NSSAIs A Network Slice is defined within a PLMN and it incorporates the 5G Core and 5G RAN components. Network slice is identified as Single Network Slice Selection Assistance Information (S-NSSAI). A S-NSSAI consists of Slice/Service type (SST) and Slice Differentiator (SD). SST is a mandatory field which defines the expected slice behavior in terms of features and services. Whereas SD is an optional field which can be used to differentiate amongst multiple slices. @@ -83,7 +83,7 @@ Here AMF, NSSF, UDM, UDR, AUSF are common to all slices. 
SMF and UPF in slice 1 **Let's begin !!** -* Steps 1 to 4 are similar to previous tutorials such as [minimalist](./DEPLOY_SA5G_MINI_DEPLOYMENT.md) or [basic](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md) deployments. Please follow these steps to deploy OAI 5G core network components. +* Steps 1 to 4 are similar to previous tutorials such as [minimalist](./DEPLOY_SA5G_MINI_WITH_GNBSIM.md) or [basic](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md) deployments. Please follow these steps to deploy OAI 5G core network components. ## 1. Pre-requisites @@ -386,8 +386,6 @@ docker-compose-host $: docker logs ueransim > /tmp/oai/slicing-with-nssf/ueransi ## 10. UE with multiple S-NSSAIs OAI 5G CN also supports UE with multiple slices. Apparently the ran simulators that we have validated do not support UE with multiple slices at once. -Hence, we have validated this feature using the commercial testing tool [dsTest](https://www.developingsolutions.com/products/about-dstest/). This test case is integrated in our [CI pipeline for NSSF](https://jenkins-oai.eurecom.fr/view/CN5G/job/OAI-CN5G-NSSF/) and AMF. This pipeline triggers a deployment scenario as shown in the figure below with two slices. During PDU session establishment request, AMF queries NSSF for NSI information with appropriate NRF Id. And then again corresponding SMF and UPF is slected in the NSI, based on S-NSSAI provided. You can verify this scenario from the [pcap](https://jenkins-oai.eurecom.fr/view/CN5G/job/OAI-CN5G-NSSF/lastSuccessfulBuild/artifact/docker_logs.zip). -  ## 11. Undeploy network functions diff --git a/docs/DEPLOY_SA5G_ULCL.md b/docs/DEPLOY_SA5G_ULCL.md index 30482c2c6dff06a2e9bcf895208149e8f1633f17..e85e762ffcc330307826e96046ef02c1a305ad0d 100644 --- a/docs/DEPLOY_SA5G_ULCL.md +++ b/docs/DEPLOY_SA5G_ULCL.md @@ -213,7 +213,7 @@ When the graph is fully built, it should look like this (again, the order does n ## 4. Simulate with gnbsim When the CN is deployed successfully, we can simulate a gNB and UE using `gnbsim`. -Please see the [gnbsim tutorial](./DEPLOY_SA5G_WITH_GNBSIM.md) on how to retrieve or build the image. +Please see the [gnbsim tutorial](./DEPLOY_SA5G_MINI_WITH_GNBSIM.md) on how to retrieve or build the image. ``` shell docker-compose-host $: docker-compose -f docker-compose-gnbsim-vpp.yaml up -d diff --git a/docs/DEPLOY_SA5G_WITH_My5g-RANTester.md b/docs/DEPLOY_SA5G_WITH_My5g-RANTester.md index c9d19330791c48c7deb65e272b20298d7f1b08d3..c5e3d178c068740134e8e013f3e63c3c7bcb79a2 100644 --- a/docs/DEPLOY_SA5G_WITH_My5g-RANTester.md +++ b/docs/DEPLOY_SA5G_WITH_My5g-RANTester.md @@ -13,7 +13,7 @@ </table> - + **Reading time: ~ 30mins** @@ -49,7 +49,7 @@ Note: In case readers are interested in deploying debuggers/developers core netw <br/> -This tutorial is an extension of a previous tutorial: [testing a `basic` deployment with dsTester](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md). In previous tutorial, we have seen the advanced testing tool dsTester, which is useful for validating even more complex scenarios. +This tutorial is an extension of a previous tutorial: [testing a `basic` deployment](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md). Moreover, there are various other opensource gnb/ue simulator tools that are available for SA5G test. In this tutorial, we use an opensource simulator tool called `My5g-RANTester`. With the help of `My5g-RANTester` tool, we can perform very basic SA5G test by simulating one gnb and multiple ues. @@ -61,7 +61,7 @@ Let's begin !! 
* Steps 1 to 5 are similar to this previous [tutorial on vpp-upf](https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-fed/-/blob/master/docs/DEPLOY_SA5G_WITH_VPP_UPF.md#5-deploying-oai-5g-core-network). Please follow these steps to deploy OAI 5G core network components. * We deploy my5G-RANTester docker service on the same host as of core network, so there is no need to create additional route as -we did for dsTest-host. +we did for gnb-host. * Before we proceed further for end-to-end SA5G test, make sure you have healthy docker services for OAI cn5g ## 1. Pre-requisites @@ -355,5 +355,3 @@ docker-compose-host $: python3 ./core-network.py --type stop-basic-vpp --scenari |:------------------------------------------------------------------------------------------ | | [5gcn-deployment-my5G-RANTester.pcap](./results/My5g-RANTester/5gcn-deployment-my5grantester.pcap) | - -* For detailed analysis of messages, please refer to this previous tutorial of [testing with dsTester](./docs/DEPLOY_SA5G_WITH_DS_TESTER.md). diff --git a/docs/DEPLOY_SA5G_WITH_OMEC_GNBSIM.md b/docs/DEPLOY_SA5G_WITH_OMEC_GNBSIM.md index d14591e55338da4b52005167645a598cd7dc4615..a8219f4ae73dd8e012e35e660f2c53abd664a2a4 100644 --- a/docs/DEPLOY_SA5G_WITH_OMEC_GNBSIM.md +++ b/docs/DEPLOY_SA5G_WITH_OMEC_GNBSIM.md @@ -13,7 +13,7 @@ </table> - + **Reading time: ~ 30mins** @@ -49,7 +49,7 @@ Note: In case readers are interested in deploying debuggers/developers core netw <br/> -This tutorial is an extension of a previous tutorial: [testing a `basic` deployment with dsTester](./DEPLOY_SA5G_BASIC_DS_TESTER_DEPLOYMENT.md). In previous tutorial, we have seen the advanced testing tool dsTester, which is useful for validating even more complex scenarios. +This tutorial is an extension of a previous tutorial: [testing a `basic` deployment](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md). Moreover, there are various other opensource gnb/ue simulator tools that are available for SA5G test. In this tutorial, we use an opensource simulator tool called `omec-gnbsim`. With the help of `omec-gnbsim` tool, we can perform basic SA5G test by simulating multiple gnb & ue. @@ -71,7 +71,7 @@ Let's begin !! * Steps 1 to 5 are similar as previous [tutorial on vpp-upf](https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-fed/-/blob/master/docs/DEPLOY_SA5G_WITH_VPP_UPF.md#5-deploying-oai-5g-core-network). Please follow these steps to deploy OAI 5G core network components. * We deploy omec-gnbsim docker service on same host as of core network, so there is no need to create additional route as -we did for dsTest-host. +we did for gnb-host. * Before we proceed further for end-to-end SA5G test, make sure you have healthy docker services for OAI cn5g ## 1. Pre-requisites @@ -235,8 +235,6 @@ docker-compose-host $: python3 ./core-network.py --type stop-basic-vpp --scenari | [5gcn-deployment-omec-gnbsim.pcapng](./results/omec-gnbsim/pcap/5gcn-deployment-omec-gnbsim.pcapng) | -* For detailed analysis of messages, please refer previous tutorial of [testing with dsTester](./docs/DEPLOY_SA5G_WITH_DS_TESTER.md). 
- <!--- For CI purposes please ignore this line ``` shell diff --git a/docs/DEPLOY_SA5G_WITH_UERANSIM.md b/docs/DEPLOY_SA5G_WITH_UERANSIM.md index c238262b37c633195fc16304e1327744143b4176..a9c28a5fae1eb35533b2b46e36bbbb6ebc89b104 100644 --- a/docs/DEPLOY_SA5G_WITH_UERANSIM.md +++ b/docs/DEPLOY_SA5G_WITH_UERANSIM.md @@ -13,7 +13,7 @@ </table> - + **Reading time: ~ 30mins** @@ -47,7 +47,7 @@ Note: In case readers are interested in deploying debuggers/developers core netw <br/> -This tutorial is an extension of a previous tutorial: [testing a `basic` deployment with dsTester](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md). In previous tutorial, we have seen the advanced testing tool dsTester, which is useful for validating even more complex scenarios. +This tutorial is an extension of a previous tutorial: [testing a `basic` deployment](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md). Moreover, there are various other opensource gnb/ue simulator tools that are available for SA5G test. In this tutorial, we use an opensource simulator tool called `UERANSIM`. With the help of `UERANSIM` tool, we can perform very basic SA5G test by simulating one gnb and multiple ues. @@ -59,7 +59,7 @@ Let's begin !! * Steps 1 to 5 are similar to this previous [tutorial on vpp-upf](https://gitlab.eurecom.fr/oai/cn5g/oai-cn5g-fed/-/blob/master/docs/DEPLOY_SA5G_WITH_VPP_UPF.md#5-deploying-oai-5g-core-network). Please follow these steps to deploy OAI 5G core network components. * We deploy ueransim docker service on the same host as for core network, so there is no need to create additional route as -we did for dsTest-host. +we did for gnb-host. * Before we proceed further for end-to-end SA5G test, make sure you have healthy docker services for OAI cn5g #### NOTE: #### @@ -285,7 +285,6 @@ iperf Done. | [5gcn-deployment-ueransim.pcap](./results/UERANSIM/pcap/5gcn-deployment-ueransim.pcap) | -* For detailed analysis of messages, please refer to this [previous tutorial](./docs/DEPLOY_SA5G_MINI_DEPLOYMENT.md). ## 9. Trying Some Advanced Stuff ## Here we try some scaling test with ueransim. There are additional IMSIs added into database (208950000000031-208950000000131). diff --git a/docs/DEPLOY_SA5G_WITH_VPP_UPF.md b/docs/DEPLOY_SA5G_WITH_VPP_UPF.md index 383bf300e9b310f4f801d96cb13cf4a1117cc1fa..190819ad0156a2c28ddefa20dc89c32e974b8d5b 100644 --- a/docs/DEPLOY_SA5G_WITH_VPP_UPF.md +++ b/docs/DEPLOY_SA5G_WITH_VPP_UPF.md @@ -12,7 +12,7 @@ </tr> </table> - + **Reading time: ~ 30mins** @@ -64,7 +64,7 @@ Project is available on github as VPP-UPG which follows release `16` of 3GPP spe Let's begin !! -* Steps 1 to 4 are similar to previous tutorials such as [minimalist](./DEPLOY_SA5G_MINI_DEPLOYMENT.md) or [basic](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md) deployments. Please follow these steps to deploy OAI 5G core network components. +* Steps 1 to 4 are similar to previous tutorials such as [minimalist](./DEPLOY_SA5G_MINI_WITH_GNBSIM.md) or [basic](./DEPLOY_SA5G_BASIC_DEPLOYMENT.md) deployments. Please follow these steps to deploy OAI 5G core network components. ## 1. Pre-requisites @@ -274,7 +274,7 @@ $ docker logs oai-smf ### 6.1. Test with Gnbsim -In this Section we will use Gnbsim to test our deployemt. Make sure you already have built [Gnbsim docker image](./DEPLOY_SA5G_WITH_GNBSIM.md#6-getting-a-gnbsim-docker-image)<br/> +In this Section we will use Gnbsim to test our deployemt. 
Make sure you already have built [Gnbsim docker image](./DEPLOY_SA5G_MINI_WITH_GNBSIM.md#6-getting-a-gnbsim-docker-image)<br/>
Launch gnbsim instance:
``` shell
diff --git a/docs/NETWORK_CONSIDERATIONS.md b/docs/NETWORK_CONSIDERATIONS.md
new file mode 100644
index 0000000000000000000000000000000000000000..928046b33d6f349906838c8e30df2def8258c011
--- /dev/null
+++ b/docs/NETWORK_CONSIDERATIONS.md
@@ -0,0 +1,79 @@
+<table style="border-collapse: collapse; border: none;">
+  <tr style="border-collapse: collapse; border: none;">
+    <td style="border-collapse: collapse; border: none;">
+      <a href="http://www.openairinterface.org/">
+        <img src="./images/oai_final_logo.png" alt="" border=3 height=50 width=150>
+        </img>
+      </a>
+    </td>
+    <td style="border-collapse: collapse; border: none; vertical-align: center;">
+      <b><font size = "5">OpenAirInterface 5G Core Network when using any docker-compose-based deployment</font></b>
+    </td>
+  </tr>
+</table>
+
+**OVERVIEW**
+
+This tutorial will help in understanding how to deploy an OAI 5G Core Network and how to connect a real RAN to it.
+
+**TABLE OF CONTENTS**
+
+1. [Pre-requisites](#1-pre-requisites)
+2. [Network Considerations](#2-network-considerations)
+
+## 1. Pre-requisites ##
+
+The container images are built using the command `docker build` on an Ubuntu 18.04 host machine. The base image for all the containers is Ubuntu 18.04.
+
+The required software and their respective versions are listed below. To replicate the testbed, use these versions or newer ones.
+
+| Software                   | Version                         |
+|:-------------------------- |:------------------------------- |
+| docker engine              | 19.03.6, build 369ce74a3c       |
+| docker-compose             | 1.27.4, build 40524192          |
+| Host operating system      | Ubuntu 18.04.4 LTS              |
+| Container operating system | Ubuntu 18.04                    |
+| tshark                     | 3.4.4 (Git commit c33f6306cbb2) |
+| wireshark                  | 3.4.4 (Git commit c33f6306cbb2) |
+
+### 1.1. Wireshark ###
+
+A recent version of `wireshark` may not be available in the Ubuntu repository, so you can either build it from source or install it from the developer PPA:
+
+```bash
+sudo add-apt-repository ppa:wireshark-dev/stable
+sudo apt update
+sudo apt install wireshark
+
+wireshark --version
+Wireshark 3.4.7 (Git v3.4.7 packaged as 3.4.7-1~ubuntu18.04.0+wiresharkdevstable1)
+```
+
+## 2. Network Considerations ##
+
+### 2.1. On the Core Network side ###
+
+Most of the time, the `docker-compose-host` machine is not configured with packet forwarding. It can be enabled with the commands below (if you have already done this in another tutorial, there is no need to repeat it).
+
+**This is the most important step towards end-to-end connectivity.**
+
+```bash
+(docker-compose-host)$ sudo sysctl net.ipv4.conf.all.forwarding=1
+(docker-compose-host)$ sudo iptables -P FORWARD ACCEPT
+```
+
+### 2.2. On the RAN side ###
+
+We need to make the CN-5G containers visible from the RAN host. Add a route towards the Docker network subnet (`192.168.70.128/26` by default), where `IP_ADDR_NIC0` is the IP address of the `docker-compose-host` interface reachable from the RAN and `NIC1` is the `gnb-host` interface connected to it:
+
+```bash
+(gnb-host)$ sudo ip route add 192.168.70.128/26 via IP_ADDR_NIC0 dev NIC1
+```
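+
+As a quick sanity check (a suggested verification only, assuming the default addressing used throughout these tutorials: `192.168.70.129` for the `docker-compose-host` side of the Docker bridge and `192.168.70.132` for the AMF), you can confirm from the RAN host that the core network is reachable before starting the gNB:
+
+```bash
+# Ping the docker-compose host address on the Docker bridge
+(gnb-host)$ ping -c 3 192.168.70.129
+# Ping the AMF container; the gNB will later connect to it over SCTP (port 38412, NGAP)
+(gnb-host)$ ping -c 3 192.168.70.132
+```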