Compare revisions
Showing with 3102 additions and 758 deletions
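# Helm Job template for the "smallblocktest" physical-simulator autotest group (one of the oai-physim sub-charts).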
apiVersion: batch/v1
kind: Job
metadata:
  name: {{ .Chart.Name }}
spec:
  template:
    metadata:
      labels:
        app: physim
    spec:
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      {{- if .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml .Values.imagePullSecrets | nindent 8 }}
      {{- end }}
      containers:
      - name: physim
        image: "{{ .Values.global.image.repository }}:{{ .Values.global.image.version }}"
        imagePullPolicy: {{ .Values.image.pullPolicy }}
        {{- if .Values.global.resources.define}}
        resources:
          requests:
            cpu: {{ .Values.global.resources.requests.cpu | quote }}
        {{- end}}
        securityContext:
          {{- toYaml .Values.securityContext | nindent 12 }}
        env:
        - name: OPENAIR_DIR
          value: /opt/oai-physim
        command: ["/bin/sh", "-c"]
        args:
        - >
          cmake_targets/autotests/run_exec_autotests.bash -g "smallblocktest" -d bin/ &&
          echo "FINISHED" && sleep infinity
      dnsPolicy: ClusterFirst
      restartPolicy: Never
      schedulerName: default-scheduler
      serviceAccountName: {{ .Values.global.serviceAccountName }}
      terminationGracePeriodSeconds: 30
      {{- if .Values.global.nodeSelector}}
      nodeSelector:
        {{- toYaml .Values.global.nodeSelector | nindent 12 }}
      {{- end }}
      {{- if .Values.global.nodeName}}
      nodeName: {{ .Values.global.nodeName.smallblocktest }}
      {{- end }}
# Default values for oai-smallblocktest.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
replicaCount: 1
image:
  # pullPolicy: IfNotPresent or Never or Always
  pullPolicy: Always
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
  # Specifies whether a service account should be created
  create: true
  # Annotations to add to the service account
  annotations: {}
  # The name of the service account to use.
  # If not set and create is true, a name is generated using the fullname template
  name: "oai-smallblocktest-sa"
podSecurityContext:
  runAsUser: 0
  runAsGroup: 0
securityContext:
  privileged: false
resources: {}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
#   cpu: 100m
#   memory: 128Mi
# requests:
#   cpu: 100m
#   memory: 128Mi
nodeSelector: {}
tolerations: []
affinity: {}
apiVersion: v1
name: oai-ulsim
description: A Helm subchart for ulsim network function
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
icon: http://www.openairinterface.org/wp-content/uploads/2015/06/cropped-oai_final_logo.png
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
version: 0.1.1
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application.
appVersion: v1
keywords:
  - Physical Simulator
  - ulsim
  - RAN
  - 4G
sources:
  - https://gitlab.eurecom.fr/oai/openairinterface5g
maintainers:
  - name: OPENAIRINTERFACE
    email: contact@openairinterface.org
{{/* vim: set filetype=mustache: */}}
{{/*
Expand the name of the chart.
*/}}
{{- define "oai-ulsim.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "oai-ulsim.fullname" -}}
{{- if .Values.fullnameOverride -}}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- $name := default .Chart.Name .Values.nameOverride -}}
{{- if contains $name .Release.Name -}}
{{- .Release.Name | trunc 63 | trimSuffix "-" -}}
{{- else -}}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{- end -}}
{{- end -}}
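{{/*
Example (illustrative): with nameOverride and fullnameOverride unset, a release named "demo"
renders "demo-oai-ulsim", while a release already named "oai-ulsim-test" renders as "oai-ulsim-test"
because the release name contains the chart name.
*/}}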
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "oai-ulsim.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}}
{{- end -}}
{{/*
Common labels
*/}}
{{- define "oai-ulsim.labels" -}}
helm.sh/chart: {{ include "oai-ulsim.chart" . }}
{{ include "oai-ulsim.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end -}}
{{/*
Selector labels
*/}}
{{- define "oai-ulsim.selectorLabels" -}}
app.kubernetes.io/name: {{ include "oai-ulsim.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end -}}
{{/*
Create the name of the service account to use
*/}}
{{- define "oai-ulsim.serviceAccountName" -}}
{{- if .Values.serviceAccount.create -}}
{{ default (include "oai-ulsim.fullname" .) .Values.serviceAccount.name }}
{{- else -}}
{{ default "default" .Values.serviceAccount.name }}
{{- end -}}
{{- end -}}
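{{/*
Example (illustrative): with the oai-ulsim default values (create: true, name: "oai-ulsim-sa")
this helper returns "oai-ulsim-sa"; with an empty name it falls back to the fullname template.
*/}}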
apiVersion: batch/v1
kind: Job
metadata:
  name: {{ .Chart.Name }}
spec:
  template:
    metadata:
      labels:
        app: physim
    spec:
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      {{- if .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml .Values.imagePullSecrets | nindent 8 }}
      {{- end }}
      containers:
      - name: physim
        image: "{{ .Values.global.image.repository }}:{{ .Values.global.image.version }}"
        imagePullPolicy: {{ .Values.image.pullPolicy }}
        {{- if .Values.global.resources.define}}
        resources:
          requests:
            cpu: {{ .Values.global.resources.requests.cpu | quote }}
        {{- end}}
        securityContext:
          {{- toYaml .Values.securityContext | nindent 12 }}
        env:
        - name: OPENAIR_DIR
          value: /opt/oai-physim
        command: ["/bin/sh", "-c"]
        args:
        - >
          cmake_targets/autotests/run_exec_autotests.bash -g "ulsim" -d bin/ &&
          echo "FINISHED" && sleep infinity
      dnsPolicy: ClusterFirst
      restartPolicy: Never
      schedulerName: default-scheduler
      serviceAccountName: {{ .Values.global.serviceAccountName }}
      terminationGracePeriodSeconds: 30
      {{- if .Values.global.nodeSelector}}
      nodeSelector:
        {{- toYaml .Values.global.nodeSelector | nindent 12 }}
      {{- end }}
      {{- if .Values.global.nodeName}}
      nodeName: {{ .Values.global.nodeName.ulsim }}
      {{- end }}
# Default values for oai-ulsim.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
replicaCount: 1
image:
  # pullPolicy: IfNotPresent or Never or Always
  pullPolicy: Always
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
  # Specifies whether a service account should be created
  create: true
  # Annotations to add to the service account
  annotations: {}
  # The name of the service account to use.
  # If not set and create is true, a name is generated using the fullname template
  name: "oai-ulsim-sa"
podSecurityContext:
  runAsUser: 0
  runAsGroup: 0
securityContext:
  privileged: false
resources: {}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
#   cpu: 100m
#   memory: 128Mi
# requests:
#   cpu: 100m
#   memory: 128Mi
nodeSelector: {}
tolerations: []
affinity: {}
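# Role/RoleBinding template granting the CI service account the right to use the
# OpenShift "anyuid" SecurityContextConstraint (the physim pods run as root).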
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: {{ .Chart.Name }}-{{ .Release.Namespace }}-role
rules:
- apiGroups:
  - security.openshift.io
  resourceNames:
  - anyuid
  resources:
  - securitycontextconstraints
  verbs:
  - use
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: {{ .Chart.Name }}-{{ .Release.Namespace }}-binding
subjects:
- kind: ServiceAccount
  name: {{ .Values.global.serviceAccountName }}
  namespace: {{ .Release.Namespace }}
roleRef:
  kind: Role
  name: {{ .Chart.Name }}-{{ .Release.Namespace }}-role
  apiGroup: rbac.authorization.k8s.io
apiVersion: v1
kind: ServiceAccount
metadata:
  name: oai-physim-sa #{{ .Values.global.serviceAccountName }}
# Default values for oai-physim.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
global:
  serviceAccountName: oai-physim-sa
  namespace: "OAICICD_PROJECT"
  image:
    registry: local
    repository: image-registry.openshift-image-registry.svc:5000/oaicicd-ran/oai-physim
    version: TAG
    # pullPolicy: IfNotPresent or Never or Always
    pullPolicy: Always
  # Removing the node selector lets the pods land on either of the two nodes
  # (Intel 3rd gen and 5th gen, both with RT kernel).
  nodeSelector:
    type: ran
  nodeName: ''
  resources:
    define: false
    requests:
      cpu: 1.5
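# Ignore patterns (gitignore-style) for logs and artifacts produced by the CI scripts.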
r*.raw
enb_*.log
ue_*.log
ping_*.*
iperf_*.*
phones_list.txt
modules_list.txt
test_results*.html
pMain*
__pycache__
#!/bin/groovy
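// Main RAN CI pipeline: checks the merge-request labels, triggers the image-build slave jobs in parallel,
// then fans out to the test-bench slave jobs and reports the overall result back to GitLab.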
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// Location of the executor node
def nodeExecutor = params.nodeExecutor
// Flags (set from the merge-request labels) to shorten the pipeline duration
def doBuild = true
def do4Gtest = false
def do5Gtest = false
def do5GUeTest = false
//
def gitCommitAuthorEmailAddr
// list of failing stages
def failingStages = ""
pipeline {
agent {
label nodeExecutor
}
options {
timestamps()
gitLabConnection('OAI GitLab')
ansiColor('xterm')
}
stages {
stage ("Verify Parameters") {
steps {
script {
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
def allParametersPresent = true
echo '\u2705 \u001B[32mVerify Labels\u001B[0m'
if ("MERGE".equals(env.gitlabActionType)) {
LABEL_CHECK = sh returnStdout: true, script: 'ci-scripts/checkGitLabMergeRequestLabels.sh --mr-id ' + env.gitlabMergeRequestIid
LABEL_CHECK = LABEL_CHECK.trim()
if (LABEL_CHECK == 'NONE') {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Your merge request should have one of the mandatory labels:\n\n"
message += " - ~documentation (don't perform any stages)\n"
message += " - ~BUILD-ONLY (execute only build stages)\n"
message += " - ~4G-LTE (perform 4G tests)\n"
message += " - ~5G-NR (perform 5G tests)\n"
message += " - ~CI (perform both 4G and 5G tests)\n"
message += " - ~nrUE (perform only 5G-UE related tests including physims excluding LDPC tests)\n\n"
message += "Not performing CI due to lack of labels"
addGitLabMRComment comment: message
error('Not performing CI due to lack of labels')
} else if (LABEL_CHECK == 'FULL') {
do4Gtest = true
do5Gtest = true
do5GUeTest = true
} else if (LABEL_CHECK == "SHORTEN-4G") {
do4Gtest = true
} else if (LABEL_CHECK == 'SHORTEN-5G') {
do5Gtest = true
} else if (LABEL_CHECK == 'SHORTEN-5G-UE') {
do5GUeTest = true
} else if (LABEL_CHECK == 'SHORTEN-4G-5G-UE') {
do4Gtest = true
do5GUeTest = true
} else if (LABEL_CHECK == 'documentation') {
doBuild = false
} else {
// is "BUILD-ONLY", will only build
}
} else {
do4Gtest = true
do5Gtest = true
}
}
}
}
stage ("Verify Guidelines") {
steps {
echo "Git URL is ${GIT_URL}"
echo "GitLab Act is ${env.gitlabActionType}"
script {
if ("MERGE".equals(env.gitlabActionType)) {
// for some time now, gitlabUserEmail is not populated in push events
gitCommitAuthorEmailAddr = env.gitlabUserEmail
echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
// The GitLab-Jenkins plugin integration cannot perform the merge by itself,
// so we do it manually --> merge conflicts are possible
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
} else {
echo "Git Branch is ${GIT_BRANCH}"
echo "Git Commit is ${GIT_COMMIT}"
// for some time now, gitlabUserEmail is not populated in push events
gitCommitAuthorEmailAddr = sh returnStdout: true, script: 'git log -n1 --pretty=format:%ae ${GIT_COMMIT}'
gitCommitAuthorEmailAddr = gitCommitAuthorEmailAddr.trim()
echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
}
}
}
post {
failure {
script {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Merge Conflicts -- Cannot perform CI"
addGitLabMRComment comment: message
currentBuild.result = 'FAILURE'
}
}
}
}
// Build Stages are Mandatory
stage ("Image Building Processes") {
when { expression {doBuild} }
parallel {
stage ("Ubuntu-Image-Builder") {
steps {
script {
triggerSlaveJob ('RAN-Ubuntu18-Image-Builder', 'Ubuntu-Image-Builder')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
ubuntuBuildStatus = finalizeSlaveJob('RAN-Ubuntu18-Image-Builder')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += ubuntuBuildStatus
}
}
}
}
stage ("Ubuntu-ARM-Image-Builder") {
steps {
script {
triggerSlaveJob ('RAN-Ubuntu-ARM-Image-Builder', 'Ubuntu-ARM-Image-Builder')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
ubuntuArmBuildStatus = finalizeSlaveJob('RAN-Ubuntu-ARM-Image-Builder')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += ubuntuArmBuildStatus
}
}
}
}
stage ("RHEL-Cluster-Image-Builder") {
steps {
script {
triggerSlaveJob ('RAN-RHEL8-Cluster-Image-Builder', 'RHEL-Cluster-Image-Builder')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
rhelBuildStatus = finalizeSlaveJob('RAN-RHEL8-Cluster-Image-Builder')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += rhelBuildStatus
}
}
}
}
stage ("cppcheck") {
steps {
script {
triggerSlaveJob ('RAN-cppcheck', 'cppcheck')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
cppcheckStatus = finalizeSlaveJob('RAN-cppcheck')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += cppcheckStatus
}
}
}
}
stage ("ARM-Cross-Compile") {
steps {
script {
triggerSlaveJob ('RAN-ARM-Cross-Compile-Builder', 'ARM-Cross-Compilation')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
armBuildStatus = finalizeSlaveJob('RAN-ARM-Cross-Compile-Builder')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += armBuildStatus
}
}
}
}
}
}
stage ("Image Test Processes") {
when { expression {doBuild} }
parallel {
stage ("PhySim-Cluster") {
when { expression {do4Gtest || do5Gtest || do5GUeTest} }
steps {
script {
triggerSlaveJob ('RAN-PhySim-Cluster', 'PhySim-Cluster')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
physimStatus = finalizeSlaveJob('RAN-PhySim-Cluster')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += physimStatus
}
}
}
}
stage ("RF-Sim-Test-4G") {
when { expression {do4Gtest} }
steps {
script {
triggerSlaveJob ('RAN-RF-Sim-Test-4G', 'RF-Sim-Test-4G')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
rfSim4GStatus = finalizeSlaveJob('RAN-RF-Sim-Test-4G')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += rfSim4GStatus
}
}
}
}
stage ("RF-Sim-Test-5G") {
when { expression {do5Gtest || do5GUeTest} }
steps {
script {
triggerSlaveJob ('RAN-RF-Sim-Test-5G', 'RF-Sim-Test-5G')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
rfSim5GStatus = finalizeSlaveJob('RAN-RF-Sim-Test-5G')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += rfSim5GStatus
}
}
}
}
stage ("OAI-FLEXRIC-RAN-Integration-Test") {
when { expression {do5Gtest || do5GUeTest} }
steps {
script {
triggerSlaveJob ('OAI-FLEXRIC-RAN-Integration-Test', 'OAI-FLEXRIC-RAN-Integration-Test')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
flexricRAN5GStatus = finalizeSlaveJob('OAI-FLEXRIC-RAN-Integration-Test')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += flexricRAN5GStatus
}
}
}
}
stage ("L2-Sim-Test-4G") {
when { expression {do4Gtest} }
steps {
script {
triggerSlaveJob ('RAN-L2-Sim-Test-4G', 'L2-Sim-Test-4G')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
l2Sim4GStatus = finalizeSlaveJob('RAN-L2-Sim-Test-4G')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += l2Sim4GStatus
}
}
}
}
stage ("LTE-B200-FDD-LTEBOX-Container") {
when { expression {do4Gtest} }
steps {
script {
triggerSlaveJob ('RAN-LTE-FDD-LTEBOX-Container', 'LTE-FDD-LTEBOX-Container')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
lteTDDB200Status = finalizeSlaveJob('RAN-LTE-FDD-LTEBOX-Container')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += lteTDDB200Status
}
}
}
}
// Pipeline to test OAI LTE-UE
stage ("LTE-B200-FDD-OAIUE-OAICN4G-Container") {
when { expression {do4Gtest} }
steps {
script {
triggerSlaveJob ('RAN-LTE-FDD-OAIUE-OAICN4G-Container', 'LTE-FDD-OAIUE-OAICN4G-Container')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
lteFDDB200OAIUEStatus = finalizeSlaveJob('RAN-LTE-FDD-OAIUE-OAICN4G-Container')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += lteFDDB200OAIUEStatus
}
}
}
}
stage ("LTE-B200-TDD-LTEBOX-Container") {
when { expression {do4Gtest} }
steps {
script {
triggerSlaveJob ('RAN-LTE-TDD-LTEBOX-Container', 'LTE-TDD-LTEBOX-Container')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
lteFDDB200Status = finalizeSlaveJob('RAN-LTE-TDD-LTEBOX-Container')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += lteFDDB200Status
}
}
}
}
stage ("NSA-B200-Module-LTEBOX-Container") {
when { expression {do4Gtest || do5Gtest} }
steps {
script {
triggerSlaveJob ('RAN-NSA-B200-Module-LTEBOX-Container', 'NSA-B200-Module-LTEBOX-Container')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
nsaTDDB200Status = finalizeSlaveJob('RAN-NSA-B200-Module-LTEBOX-Container')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += nsaTDDB200Status
}
}
}
}
stage ("SA-B200-Module-SABOX-Container") {
when { expression {do5Gtest} }
steps {
script {
triggerSlaveJob ('RAN-SA-B200-Module-SABOX-Container', 'SA-B200-Module-SABOX-Container')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
saTDDB200Status = finalizeSlaveJob('RAN-SA-B200-Module-SABOX-Container')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += saTDDB200Status
}
}
}
}
stage ("gNB-N300-Timing-Phytest-LDPC") {
when { expression {do5Gtest} }
steps {
script {
triggerSlaveJob ('RAN-gNB-N300-Timing-Phytest-LDPC', 'gNB-N300-Timing-Phytest-LDPC')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
phytestLDPCoffloadStatus = finalizeSlaveJob('RAN-gNB-N300-Timing-Phytest-LDPC')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += phytestLDPCoffloadStatus
}
}
}
}
stage ("LTE-TDD-2x2-Container") {
when { expression {do4Gtest} }
steps {
script {
triggerSlaveJob ('RAN-LTE-TDD-2x2-Container', 'LTE-TDD-2x2-Container')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
lteTDD2x2N3xxStatus = finalizeSlaveJob('RAN-LTE-TDD-2x2-Container')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += lteTDD2x2N3xxStatus
}
}
}
}
stage ("SA-AW2S-CN5G") {
when { expression {do5Gtest} }
steps {
script {
triggerSlaveJob ('RAN-SA-AW2S-CN5G', 'SA-AW2S-CN5G')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
saAW2SStatus = finalizeSlaveJob('RAN-SA-AW2S-CN5G')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += saAW2SStatus
}
}
}
}
stage ("Sanity-Check OAI-CN5G") {
when { expression {do5Gtest} }
steps {
script {
triggerCN5GSlaveJob ('OAI-CN5G-COTS-UE-Test', 'OAI-CN5G-COTS-UE-Test')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
cn5gCOTSUESanityCheck = finalizeSlaveJob('OAI-CN5G-COTS-UE-Test')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += cn5gCOTSUESanityCheck
}
}
}
}
stage ("SA-AERIAL-CN5G") {
when { expression {do5Gtest} }
steps {
script {
triggerSlaveJob ('RAN-SA-AERIAL-CN5G', 'SA-AERIAL-CN5G')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
saAERIALStatus = finalizeSlaveJob('RAN-SA-AERIAL-CN5G')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += saAERIALStatus
}
}
}
}
stage ("SA-2x2-Module-CN5G") {
when { expression {do5Gtest} }
steps {
script {
triggerSlaveJob ('RAN-SA-2x2-Module-CN5G', 'SA-2x2-Module-CN5G')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
saTDD2x2Status = finalizeSlaveJob('RAN-SA-2x2-Module-CN5G')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += saTDD2x2Status
}
}
}
}
stage ("SA-FHI72-CN5G") {
when { expression {do5Gtest} }
steps {
script {
triggerSlaveJob ('RAN-SA-FHI72-CN5G', 'SA-FHI72-CN5G')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
saFHI72Status = finalizeSlaveJob('RAN-SA-FHI72-CN5G')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += saFHI72Status
}
}
}
}
stage ("SA-OAIUE-CN5G") {
when { expression {do5Gtest || do5GUeTest} }
steps {
script {
triggerSlaveJob ('RAN-SA-OAIUE-CN5G', 'SA-OAIUE-CN5G')
}
}
post {
always {
script {
// Use a unique variable name for each test stage to avoid overwriting a shared variable when stages run in parallel
saOAIUEStatus = finalizeSlaveJob('RAN-SA-OAIUE-CN5G')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
failingStages += saOAIUEStatus
}
}
}
}
}
}
stage ("DockerHub-Push") {
when { expression {doBuild && "PUSH".equals(env.gitlabActionType)} }
steps {
script {
triggerSlaveJob ('RAN-DockerHub-Push', 'DockerHub-Push')
}
}
post {
failure {
script {
echo "Push to Docker-Hub KO"
currentBuild.result = 'FAILURE'
failingStages += '\n * RAN-DockerHub-Push'
}
}
}
}
}
post {
success {
script {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")"
if ("MERGE".equals(env.gitlabActionType)) {
addGitLabMRComment comment: message
def message2 = message + " -- MergeRequest #" + env.gitlabMergeRequestIid + " (" + env.gitlabMergeRequestTitle + ")"
}
echo "Pipeline is SUCCESSFUL"
}
}
failure {
script {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")"
if ("MERGE".equals(env.gitlabActionType)) {
def fullMessage = message + '\n\nList of failing test stages:' + failingStages
addGitLabMRComment comment: fullMessage
def message2 = message + " -- MergeRequest #" + env.gitlabMergeRequestIid + " (" + env.gitlabMergeRequestTitle + ")"
}
echo "Pipeline FAILED"
}
}
}
}
// ---- Slave Job functions
def triggerSlaveJob (jobName, gitlabStatusName) {
if ("MERGE".equals(env.gitlabActionType)) {
MR_NUMBER = env.gitlabMergeRequestIid
} else {
MR_NUMBER = 'develop'
}
// Workaround for the "cancelled" GitLab pipeline notification:
// the slave job is triggered with propagate: false so that the commands below still run.
// From the stage's point of view its status is then always PASS/SUCCESS,
// so the localStatus variable MUST be analyzed to assess the real result.
localStatus = build job: jobName,
parameters: [
string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
string(name: 'eNB_MR', value: String.valueOf(MR_NUMBER)),
booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
], propagate: false
localResult = localStatus.getResult()
echo "${jobName} Slave Job status is ${localResult}"
gitlabCommitStatus(name: gitlabStatusName) {
if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
echo "${jobName} Slave Job is OK"
} else {
error "${jobName} Slave Job is KO"
}
}
}
def triggerCN5GSlaveJob (jobName, gitlabStatusName) {
if ("MERGE".equals(env.gitlabActionType)) {
shortenShaOne = sh returnStdout: true, script: 'git log -1 --pretty=format:"%h" --abbrev=8 ' + env.gitlabMergeRequestLastCommit
shortenShaOne = shortenShaOne.trim()
branchName = env.gitlabSourceBranch.replaceAll("/", "-").trim()
fullRanTag = 'porcepix.sboai.cs.eurecom.fr/oai-gnb:' + env.gitlabSourceBranch + '-' + shortenShaOne
} else {
shortenShaOne = sh returnStdout: true, script: 'git log -1 --pretty=format:"%h" --abbrev=8 ' + env.GIT_COMMIT
shortenShaOne = shortenShaOne.trim()
fullRanTag = 'porcepix.sboai.cs.eurecom.fr/oai-gnb:develop-' + shortenShaOne
}
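// e.g. fullRanTag = "porcepix.sboai.cs.eurecom.fr/oai-gnb:develop-1a2b3c4d" (branch name plus the 8-character short SHA)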
// Workaround for the "cancelled" GitLab pipeline notification:
// the slave job is triggered with propagate: false so that the commands below still run.
// From the stage's point of view its status is then always PASS/SUCCESS,
// so the localStatus variable MUST be analyzed to assess the real result.
localStatus = build job: jobName,
parameters: [
string(name: 'FULL_RAN_TAG', value: String.valueOf(fullRanTag))
], propagate: false
localResult = localStatus.getResult()
echo "${jobName} Slave Job status is ${localResult}"
gitlabCommitStatus(name: gitlabStatusName) {
if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
echo "${jobName} Slave Job is OK"
} else {
error "${jobName} Slave Job is KO"
}
}
}
def triggerSlaveJobNoGitLab (jobName) {
if ("MERGE".equals(env.gitlabActionType)) {
MR_NUMBER = env.gitlabMergeRequestIid
} else {
MR_NUMBER = 'develop'
}
// Workaround for the "cancelled" GitLab pipeline notification:
// the slave job is triggered with propagate: false so that the commands below still run.
// From the stage's point of view its status is then always PASS/SUCCESS,
// so the localStatus variable MUST be analyzed to assess the real result.
localStatus = build job: jobName,
parameters: [
string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
string(name: 'eNB_MR', value: String.valueOf(MR_NUMBER)),
booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
], propagate: false
localResult = localStatus.getResult()
echo "${jobName} Slave Job status is ${localResult}"
if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
echo "${jobName} Slave Job is OK"
} else {
error "${jobName} Slave Job is KO"
}
}
def finalizeSlaveJob(jobName) {
lock ('Parent-Lock') {
// In case of any non-success, we retrieve the HTML report of the last completed slave job.
// The only drawback is that we may retrieve the HTML report of a previous build.
if (jobName == 'OAI-CN5G-COTS-UE-Test') {
fileName = "test_results_oai_cn5g_cots_ue.html"
} else {
fileName = "test_results-${jobName}.html"
}
artifactUrl = BUILD_URL
if (!fileExists(fileName)) {
copyArtifacts(projectName: jobName,
filter: 'test_results*.html',
selector: lastCompleted())
if (fileExists(fileName)) {
sh "sed -i -e 's#TEMPLATE_BUILD_TIME#${JOB_TIMESTAMP}#' ${fileName}"
archiveArtifacts artifacts: fileName
// BUILD_URL is like http://server:port/jenkins/job/foo/15/
// no need to add a prefixed '/'
artifactUrl += 'artifact/' + fileName
}
}
artifactUrl = "\n * [${jobName}](${artifactUrl})"
return artifactUrl
}
}
#!/bin/groovy
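// Generic slave test pipeline: validates the parameters pushed by the master job, checks out the requested
// commit, drives ci-scripts/main.py over the given XML test files and archives the logs and HTML results.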
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// Location of the executor node
def pythonExecutor = params.pythonExecutor
// Location of the test XML file to be run
def testXMLFile = params.pythonTestXmlFile
def mainPythonAllXmlFiles = ""
def buildStageStatus = true
// Name of the test stage
def testStageName = params.pipelineTestStageName
// Name of the resource
def lockResources = []
if (params.LockResources != null && params.LockResources.trim().length() > 0)
params.LockResources.trim().split(",").each{lockResources += [resource: it.trim()]}
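// e.g. LockResources = "CN-5G, gNB-N310" yields lockResources = [[resource: 'CN-5G'], [resource: 'gNB-N310']]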
// Global Parameters. Normally they should be populated when the master job
// triggers the slave job with parameters
def eNB_Repository
def eNB_Branch
def eNB_CommitID
def eNB_AllowMergeRequestProcess = false
def eNB_TargetBranch
def directoryExistsGlob(fileNameWithGlob) {
/* if multiple directories match, will join their names and call fileExists() on that */
def dir = sh returnStdout: true, script: 'find . -name \'' + fileNameWithGlob + '\' -type d'
dir = dir.replaceAll("\n", "").trim()
echo "matching '" + fileNameWithGlob + "' found: '" + dir + "'"
if (dir == "")
return false
return fileExists(dir)
}
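// e.g. directoryExistsGlob("build_log_*") is true when at least one build_log_* directory exists in the workspace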
//-------------------------------------------------------------------------------
// Pipeline start
pipeline {
agent {
label pythonExecutor
}
options {
timestamps()
ansiColor('xterm')
lock(extra: lockResources)
}
stages {
stage("Build Init") {
steps {
// update the build name and description
buildName "${params.eNB_MR}"
buildDescription "Branch : ${params.eNB_Branch}"
}
}
stage ('Verify Parameters') {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
def allParametersPresent = true
// It is already too late to check it
if (params.pythonExecutor != null) {
echo "eNB CI executor node : ${pythonExecutor}"
}
// If not present, pick a default stage name
if (params.pipelineTestStageName == null) {
// picking default
testStageName = 'Template Test Stage'
}
if (params.LockResources == null) {
echo "no LockResources given"
allParametersPresent = false
}
if (params.eNB_IPAddress == null) {
echo "no eNB_IPAddress given"
allParametersPresent = false
}
if (params.eNB_SourceCodePath == null) {
echo "no eNB_SourceCodePath given"
allParametersPresent = false
}
if (params.eNB_Credentials == null) {
echo "no eNB_Credentials given"
allParametersPresent = false
}
// the following 4 parameters should be pushed by the master trigger
// if not present, take the job GIT variables (used for developing)
if (params.eNB_Repository == null) {
eNB_Repository = env.GIT_URL
} else {
eNB_Repository = params.eNB_Repository
}
echo "eNB_Repository : ${eNB_Repository}"
if (params.eNB_Branch == null) {
eNB_Branch = env.GIT_BRANCH
} else {
eNB_Branch = params.eNB_Branch
}
echo "eNB_Branch : ${eNB_Branch}"
if (params.eNB_CommitID == null) {
eNB_CommitID = env.GIT_COMMIT
} else {
eNB_CommitID = params.eNB_CommitID
}
echo "eNB_CommitID : ${eNB_CommitID}"
if (params.eNB_mergeRequest != null) {
eNB_AllowMergeRequestProcess = params.eNB_mergeRequest
if (eNB_AllowMergeRequestProcess) {
if (params.eNB_TargetBranch != null) {
eNB_TargetBranch = params.eNB_TargetBranch
} else {
eNB_TargetBranch = 'develop'
}
echo "eNB_TargetBranch : ${eNB_TargetBranch}"
}
} else {
echo "no eNB_mergeRequest given - not merging develop"
}
if (params.EPC_IPAddress == null) {
echo "no EPC_IPAddress given"
allParametersPresent = false
}
if (params.EPC_Type == null) {
echo "no EPC_Type given"
allParametersPresent = false
}
if (params.EPC_SourceCodePath == null) {
echo "no EPC_SourceCodePath given"
allParametersPresent = false
}
if (params.EPC_Credentials == null) {
echo "no EPC_Credentials given"
allParametersPresent = false
}
if (params.OC_Credentials == null) {
echo "no OC_Credentials given"
allParametersPresent = false
}
if (params.OC_ProjectName == null) {
echo "no OC_ProjectName given"
allParametersPresent = false
}
if (params.pythonTestXmlFile == null) {
echo "no pythonTestXmlFile given"
allParametersPresent = false
}
if (allParametersPresent) {
echo "All parameters are present"
if (eNB_AllowMergeRequestProcess) {
sh "git fetch"
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${eNB_Branch} --src-commit ${eNB_CommitID} --target-branch ${eNB_TargetBranch} --target-commit latest"
} else {
sh "git fetch"
sh "git checkout -f ${eNB_CommitID}"
}
} else {
echo "Some parameters are missing"
sh "./ci-scripts/fail.sh"
}
}
}
}
stage ("Deploy and Test") {
steps {
script {
dir ('ci-scripts') {
echo "\u2705 \u001B[32m${testStageName}\u001B[0m"
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
mainPythonAllXmlFiles += "--XMLTestFile=" + xmlFile + " "
echo "Test XML file : ${xmlFile}"
} else {
echo "Test XML file ${xmlFile}: no such file"
}
}
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password']
]) {
sh "python3 main.py --mode=InitiateHtml --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} ${mainPythonAllXmlFiles}"
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
try {
timeout (time: 60, unit: 'MINUTES') {
sh "python3 main.py --mode=TesteNB --eNBIPAddress=${params.eNB_IPAddress} --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --EPCIPAddress=${params.EPC_IPAddress} --EPCType=${params.EPC_Type} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --XMLTestFile=${xmlFile} --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName}"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
buildStageStatus = false
}
}
}
sh "python3 main.py --mode=FinalizeHtml --finalStatus=${buildStageStatus} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
}
}
}
}
}
stage ("Log Collection") {
steps {
script {
dir ('ci-scripts') {
if (directoryExistsGlob("build_log_*")) {
sh "zip -r -qq build_log_${env.BUILD_ID}.zip build_log_*/*"
sh "rm -rf build_log_*/"
archiveArtifacts artifacts: "build_log_${env.BUILD_ID}.zip"
}
if (directoryExistsGlob("test_log_*")) {
sh "zip -r -qq test_log_${env.BUILD_ID}.zip test_log_*/*"
sh "rm -rf test_log_*/"
archiveArtifacts artifacts: "test_log_${env.BUILD_ID}.zip"
}
if (fileExists("test_results.html")) {
sh "mv test_results.html test_results-${env.JOB_NAME}.html"
sh "sed -i -e 's#TEMPLATE_JOB_NAME#${JOB_NAME}#' -e 's@build #TEMPLATE_BUILD_ID@build #${BUILD_ID}@' -e 's#Build-ID: TEMPLATE_BUILD_ID#Build-ID: <a href=\"${BUILD_URL}\">${BUILD_ID}</a>#' -e 's#TEMPLATE_STAGE_NAME#${testStageName}#' test_results-${JOB_NAME}.html"
archiveArtifacts artifacts: "test_results-${env.JOB_NAME}.html"
}
}
}
}
}
}
}
#!/bin/groovy
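// Colosseum (NEU) CI pipeline: launches a test job on the remote Colosseum testbed, waits for it to finish,
// then fetches and checks the test results before setting the final job status.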
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
def node = "${params.JenkinsNode}"
def resource = "${params.JenkinsResource}"
pipeline {
agent {
label node
}
options {
timestamps()
ansiColor('xterm')
timeout(time: 3, unit: 'HOURS')
}
stages {
stage ("Trigger NEU CI") {
steps {
lock (resource) {
script {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.ColosseumCredentials}", usernameVariable: 'col_username', passwordVariable: 'col_password'],
]) {
// use the eNB_TargetBranch variable if eNB_Branch is empty
def git_Branch = ""
if (params.eNB_Branch.isEmpty()) {
echo 'eNB_Branch parameter is empty, using eNB_TargetBranch instead'
git_Branch = params.eNB_TargetBranch
} else {
git_Branch = params.eNB_Branch
}
// use default 10011 rf scenario if not specified
if (params.Colosseum_Rf_Scenario.isEmpty()) {
echo 'Colosseum_Rf_Scenario parameter is empty, defaulting to 10011'
rf_scenario = "10011"
} else {
rf_scenario = params.Colosseum_Rf_Scenario
}
sh "echo Testing reachability of Colosseum endpoint"
sh "ping -c 3 10.100.1.253"
sh "./ci-scripts/colosseum_scripts/launch-job.sh ${col_username} ${col_password} ${currentBuild.number} ${params.eNB_Repository} ${git_Branch} ${rf_scenario} ${env.JOB_URL}"
}
}
}
}
}
stage ("Wait for job to finish") {
steps {
lock (resource) {
script {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.ColosseumCredentials}", usernameVariable: 'col_username', passwordVariable: 'col_password'],
]) {
timeout (time: 2, unit: 'HOURS') {
sh "./ci-scripts/colosseum_scripts/wait-job-end.sh ${col_username} ${col_password}"
}
}
}
}
}
}
stage ("Get test results") {
steps {
lock (resource) {
script {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.ColosseumCredentials}", usernameVariable: 'col_username', passwordVariable: 'col_password'],
]) {
sh "./ci-scripts/colosseum_scripts/get-test-results.sh ${col_username} ${col_password}"
}
if(fileExists("results.tar.xz")) {
archiveArtifacts "results.tar.xz"
sh "mkdir -p results"
sh "tar -xf results.tar.xz -C results --strip-components=1"
if(fileExists("results/test_summary.html")) {
archiveArtifacts "results/test_summary.html"
}
// check if test passed
sh "./ci-scripts/colosseum_scripts/check-results.sh results/test_summary.html"
sh "rm -Rf ./results"
}
}
}
}
}
stage ("Set job status") {
steps {
lock (resource) {
script {
echo 'Set job status'
sh "./ci-scripts/colosseum_scripts/set-job-status.sh"
}
}
}
}
}
}
#!/bin/groovy
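// VM-based CI pipeline: builds the eNB/UE variants and simulators inside VMs with oai-ci-vm-tool
// (plus an optional Red Hat build), then runs the VM-based simulator test suites.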
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// Abstraction function to send social-media messages (e.g. on Slack or Mattermost)
def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) {
if (params.pipelineUsesSlack != null) {
if (params.pipelineUsesSlack) {
slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage
}
}
}
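// Example usage (illustrative): sendSocialMediaMessage('ci-enb', 'good', "${JOB_NAME} build ${BUILD_ID} passed")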
def doRedHatBuild = false
def doFlexranCtrlTest = false
// Location of the executor node
def nodeExecutor = params.nodeExecutor
pipeline {
agent {
label nodeExecutor
}
options {
disableConcurrentBuilds()
timestamps()
gitLabConnection('OAI GitLab')
gitlabBuilds(builds: ["Build eNb-USRP", "Build basic-sim", "Build phy-sim", "Build eNb-ethernet", "Build UE-ethernet", "Analysis with cppcheck", "Test phy-sim", "Test basic-sim", "Test L2-sim", "Test-Mono-FDD-Band7", "Test-Mono-TDD-Band40", "Test-IF4p5-FDD-Band7", "Test-IF4p5-TDD-Band40", "Test-Mono-FDD-Band13"])
ansiColor('xterm')
}
stages {
stage ("Verify Parameters") {
steps {
script {
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
def allParametersPresent = true
if (params.RedHatRemoteServer == null) {
allParametersPresent = false
}
if (params.RedHatRemoteCredentials == null) {
allParametersPresent = false
}
if (params.RedHatWorkingPath == null) {
allParametersPresent = false
}
if (allParametersPresent) {
echo "Performing Red Hat Build"
doRedHatBuild = true
} else {
doRedHatBuild = false
}
if (params.FlexRanRtcGitLabRepository_Credentials != null) {
doFlexranCtrlTest = true
}
if (fileExists("flexran")) {
sh "rm -Rf flexran > /dev/null 2>&1"
}
}
}
}
stage ("Verify Guidelines") {
steps {
echo "Git URL is ${GIT_URL}"
echo "GitLab Act is ${env.gitlabActionType}"
script {
if ("MERGE".equals(env.gitlabActionType)) {
// for some time now, gitlabUserEmail is not populated in push events
gitCommitAuthorEmailAddr = env.gitlabUserEmail
echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
// The GitLab-Jenkins plugin integration cannot perform the merge by itself,
// so we do it manually --> merge conflicts are possible
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
sh "zip -r -qq localZip.zip ."
// Run the astyle checks on the files modified by the merge request.
// For the moment there is no fail criterion, only a notification of the number of non-compliant files.
sh "./ci-scripts/checkCodingFormattingRules.sh --src-branch ${env.gitlabSourceBranch} --target-branch ${env.gitlabTargetBranch}"
def res=readFile('./oai_rules_result.txt').trim();
if ("0".equals(res)) {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): All Changed files in Merge Request follow OAI Formatting Rules"
addGitLabMRComment comment: message
} else {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Some Changed files in Merge Request DO NOT follow OAI Formatting Rules"
addGitLabMRComment comment: message
}
} else {
echo "Git Branch is ${GIT_BRANCH}"
echo "Git Commit is ${GIT_COMMIT}"
// for some time now, gitlabUserEmail is not populated in push events
gitCommitAuthorEmailAddr = sh returnStdout: true, script: 'git log -n1 --pretty=format:%ae ${GIT_COMMIT}'
gitCommitAuthorEmailAddr = gitCommitAuthorEmailAddr.trim()
echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
sh "git log -n1 --pretty=format:\"%s\" > .git/CI_COMMIT_MSG"
sh "zip -r -qq localZip.zip ."
// Run the astyle checks on all C/H files in the repository.
// For the moment there is no fail criterion, only a notification of the number of non-compliant files.
sh "./ci-scripts/checkCodingFormattingRules.sh"
}
if (doFlexranCtrlTest) {
sh "mkdir flexran"
dir ('flexran') {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.FlexRanRtcGitLabRepository_Credentials}", usernameVariable: 'git_username', passwordVariable: 'git_password']
]) {
sh "git clone https://${git_username}:${git_password}@gitlab.eurecom.fr/flexran/flexran-rtc.git . > ../git_clone.log 2>&1"
}
sh "sed -i -e 's#add-apt-repository.*cleishm.*neo4j#add-apt-repository ppa:cleishm/neo4j -y#' -e 's#libneo4j-client-dev#libneo4j-client-dev -y#' tools/install_dependencies"
sh "zip -r -qq flexran.zip ."
}
}
}
}
post {
failure {
script {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Merge Conflicts -- Cannot perform CI"
addGitLabMRComment comment: message
currentBuild.result = 'FAILURE'
}
}
}
}
stage ("Start VM -- basic-sim") {
steps {
timeout (time: 5, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool build --workspace $WORKSPACE --variant basic-sim --job-name ${JOB_NAME} --build-id ${BUILD_ID} --daemon"
}
}
}
stage ("Start VM -- enb-usrp") {
steps {
timeout (time: 5, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool build --workspace $WORKSPACE --variant enb-usrp --job-name ${JOB_NAME} --build-id ${BUILD_ID} --daemon"
}
}
}
stage ("Start VM -- enb-ethernet") {
steps {
timeout (time: 5, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool build --workspace $WORKSPACE --variant enb-ethernet --job-name ${JOB_NAME} --build-id ${BUILD_ID} --daemon"
}
}
}
stage ("Start VM -- ue-ethernet") {
steps {
timeout (time: 5, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool build --workspace $WORKSPACE --variant ue-ethernet --job-name ${JOB_NAME} --build-id ${BUILD_ID} --daemon"
}
}
}
stage ("Start VM -- phy-sim") {
steps {
timeout (time: 5, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool build --workspace $WORKSPACE --variant phy-sim --job-name ${JOB_NAME} --build-id ${BUILD_ID} --daemon"
}
}
}
stage ("Start VM -- cppcheck") {
steps {
timeout (time: 5, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool build --workspace $WORKSPACE --variant cppcheck --job-name ${JOB_NAME} --build-id ${BUILD_ID} --daemon"
}
}
}
stage ("Variant Builds") {
parallel {
stage ("Analysis with cppcheck") {
steps {
gitlabCommitStatus(name: "Analysis with cppcheck") {
timeout (time: 20, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool wait --workspace $WORKSPACE --variant cppcheck --job-name ${JOB_NAME} --build-id ${BUILD_ID}"
}
}
}
}
stage ("Build eNb-USRP") {
steps {
gitlabCommitStatus(name: "Build eNb-USRP") {
timeout (time: 20, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool wait --workspace $WORKSPACE --variant enb-usrp --job-name ${JOB_NAME} --build-id ${BUILD_ID}"
}
}
}
}
stage ("Build basic simulator") {
steps {
gitlabCommitStatus(name: "Build basic-sim") {
timeout (time: 20, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool wait --workspace $WORKSPACE --variant basic-sim --job-name ${JOB_NAME} --build-id ${BUILD_ID} --keep-vm-alive"
}
}
}
}
stage ("Build eNb-ethernet") {
steps {
gitlabCommitStatus(name: "Build eNb-ethernet") {
timeout (time: 20, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool wait --workspace $WORKSPACE --variant enb-ethernet --job-name ${JOB_NAME} --build-id ${BUILD_ID} --keep-vm-alive"
}
}
}
}
stage ("Build UE-ethernet") {
steps {
gitlabCommitStatus(name: "Build UE-ethernet") {
timeout (time: 20, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool wait --workspace $WORKSPACE --variant ue-ethernet --job-name ${JOB_NAME} --build-id ${BUILD_ID} --keep-vm-alive"
}
}
}
}
stage ("Build physical simulators") {
steps {
gitlabCommitStatus(name: "Build phy-sim") {
timeout (time: 20, unit: 'MINUTES') {
sh "./ci-scripts/oai-ci-vm-tool wait --workspace $WORKSPACE --variant phy-sim --job-name ${JOB_NAME} --build-id ${BUILD_ID} --keep-vm-alive"
}
}
}
}
stage ("Build eNb-USRP on Red Hat") {
when {
expression {doRedHatBuild}
}
steps {
gitlabCommitStatus(name: "Build eNb-USRP-RHE") {
script {
try {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.RedHatRemoteCredentials}", usernameVariable: 'RH_Username', passwordVariable: 'RH_Password']
]) {
timeout (time: 20, unit: 'MINUTES') {
sh "./ci-scripts/buildOnRH.sh --workspace $WORKSPACE --job-name ${JOB_NAME} --build-id ${BUILD_ID} --remote-host ${params.RedHatRemoteServer} --remote-path ${params.RedHatWorkingPath} --remote-user-name ${RH_Username} --remote-password ${RH_Password}"
}
}
} catch (Exception e) {
echo "Red Hat build failed but we could keep running pipeline if all ubuntu-based build passed"
}
}
}
}
}
}
post {
failure {
script {
currentBuild.result = 'FAILURE'
}
}
always {
script {
dir ('archives') {
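// Bundle the per-variant build logs into a single downloadable artifact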
sh "zip -r -qq vm_build_logs.zip basic_sim enb_usrp phy_sim cppcheck enb_eth ue_eth red_hat"
}
if(fileExists('archives/vm_build_logs.zip')) {
archiveArtifacts artifacts: 'archives/vm_build_logs.zip'
}
if ("MERGE".equals(env.gitlabActionType)) {
sh "./ci-scripts/oai-ci-vm-tool report-build --workspace $WORKSPACE --git-url ${GIT_URL} --job-name ${JOB_NAME} --build-id ${BUILD_ID} --trigger merge-request --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
// If the merge request has introduced additional CPPCHECK errors or warnings, post a notification in GitLab
if (fileExists('oai_cppcheck_added_errors.txt')) {
def ret=readFile('./oai_cppcheck_added_errors.txt').trim();
if ("0".equals(ret)) {
echo "No added cppcheck warnings/errors in this merge request"
} else {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Some modified files in Merge Request MAY have INTRODUCED up to " + ret + " CPPCHECK errors/warnings"
addGitLabMRComment comment: message
}
}
// If the merge request has introduced compilation warnings, post a notification in GitLab
sh "./ci-scripts/checkAddedWarnings.sh --src-branch ${env.gitlabSourceBranch} --target-branch ${env.gitlabTargetBranch}"
def res=readFile('./oai_warning_files.txt').trim();
if ("0".equals(res)) {
echo "No issues w/ warnings/errors in this merge request"
} else {
def fileList=readFile('./oai_warning_files_list.txt').trim();
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Some modified files in Merge Request MAY have INTRODUCED WARNINGS (" + fileList + ")"
addGitLabMRComment comment: message
}
} else {
sh "./ci-scripts/oai-ci-vm-tool report-build --workspace $WORKSPACE --git-url ${GIT_URL} --job-name ${JOB_NAME} --build-id ${BUILD_ID} --trigger push --branch ${GIT_BRANCH} --commit ${GIT_COMMIT}"
}
if(fileExists('build_results.html')) {
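// Turn the plain Build-ID in the report into a link to this Jenkins build and fill in the job timestamp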
sh "sed -i -e 's#Build-ID: ${BUILD_ID}#Build-ID: <a href=\"${BUILD_URL}\">${BUILD_ID}</a>#' -e 's#TEMPLATE_BUILD_TIME#${JOB_TIMESTAMP}#' build_results.html"
archiveArtifacts artifacts: 'build_results.html'
}
}
}
}
}
stage ("Variant Tests") {
parallel {
stage ("VM-based tests") {
stages {
stage ("Test physical simulators") {
steps {
script {
timeout (time: 20, unit: 'MINUTES') {
try {
gitlabCommitStatus(name: "Test phy-sim") {
sh "./ci-scripts/oai-ci-vm-tool test --workspace $WORKSPACE --variant phy-sim --job-name ${JOB_NAME} --build-id ${BUILD_ID}"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
}
}
}
}
}
stage ("Build Flexran Controller") {
when {
expression {doFlexranCtrlTest}
}
steps {
script {
timeout (time: 20, unit: 'MINUTES') {
try {
sh "./ci-scripts/oai-ci-vm-tool build --workspace $WORKSPACE --variant flexran-rtc --job-name ${JOB_NAME} --build-id ${BUILD_ID} --keep-vm-alive"
} catch (Exception e) {
currentBuild.result = 'FAILURE'
}
}
}
}
}
stage ("Test basic simulator") {
steps {
script {
timeout (time: 30, unit: 'MINUTES') {
try {
gitlabCommitStatus(name: "Test basic-sim") {
sh "./ci-scripts/oai-ci-vm-tool test --workspace $WORKSPACE --variant basic-sim --job-name ${JOB_NAME} --build-id ${BUILD_ID}"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
}
}
}
}
}
stage ("Test L1 simulator") {
steps {
script {
timeout (time: 30, unit: 'MINUTES') {
try {
gitlabCommitStatus(name: "Test L1-sim") {
sh "./ci-scripts/oai-ci-vm-tool test --workspace $WORKSPACE --variant l1-sim --job-name ${JOB_NAME} --build-id ${BUILD_ID} --keep-vm-alive"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
}
}
}
}
}
stage ("Test RF simulator") {
steps {
script {
timeout (time: 30, unit: 'MINUTES') {
try {
gitlabCommitStatus(name: "Test RF-sim") {
sh "./ci-scripts/oai-ci-vm-tool test --workspace $WORKSPACE --variant rf-sim --job-name ${JOB_NAME} --build-id ${BUILD_ID} --keep-vm-alive"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
}
}
}
}
}
stage ("Test L2 simulator") {
steps {
script {
timeout (time: 30, unit: 'MINUTES') {
try {
gitlabCommitStatus(name: "Test L2-sim") {
sh "./ci-scripts/oai-ci-vm-tool test --workspace $WORKSPACE --variant l2-sim --job-name ${JOB_NAME} --build-id ${BUILD_ID}"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
}
}
}
}
}
}
}
stage ("Test MONOLITHIC - FDD - Band 7 - B210") {
steps {
script {
triggerSlaveJob ('eNB-CI-FDD-Band7-B210', 'Test-Mono-FDD-Band7')
}
}
post {
always {
script {
finalizeSlaveJob('eNB-CI-FDD-Band7-B210')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
}
}
}
}
stage ("Test MONOLITHIC - TDD - Band 40 - B210") {
steps {
script {
triggerSlaveJob ('eNB-CI-TDD-Band40-B210', 'Test-Mono-TDD-Band40')
}
}
post {
always {
script {
finalizeSlaveJob('eNB-CI-TDD-Band40-B210')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
}
}
}
}
stage ("Test IF4p5 - FDD - Band 7 - B210") {
steps {
script {
triggerSlaveJob ('eNB-CI-IF4p5-FDD-Band7-B210', 'Test-IF4p5-FDD-Band7')
}
}
post {
always {
script {
finalizeSlaveJob('eNB-CI-IF4p5-FDD-Band7-B210')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
}
}
}
}
stage ("Test IF4p5 - TDD - Band 40 - B210") {
steps {
script {
triggerSlaveJob ('eNB-CI-IF4p5-TDD-Band40-B210', 'Test-IF4p5-TDD-Band40')
}
}
post {
always {
script {
finalizeSlaveJob('eNB-CI-IF4p5-TDD-Band40-B210')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
}
}
}
}
stage ("Test MONOLITHIC - FDD - Band 13 - B210") {
steps {
script {
triggerSlaveJob ('eNB-CI-MONO-FDD-Band13-B210', 'Test-Mono-FDD-Band13')
}
}
post {
always {
script {
finalizeSlaveJob('eNB-CI-MONO-FDD-Band13-B210')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
}
}
}
}
stage ("Test OAI UE - FDD - Band 20 - B200") {
steps {
script {
triggerSlaveJobNoGitLab ('UE-CI-FDD-Band20-B200')
}
}
post {
always {
script {
finalizeSlaveJob('UE-CI-FDD-Band20-B200')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
}
}
}
}
stage ("Test OAI UE - OAI eNB - FDD - Band 7 - B200") {
steps {
script {
// Delay triggering this slave job so it is always the last one to run
sh "sleep 240"
triggerSlaveJob ('eNB-UE-CI-MONO-FDD-Band7-B200', 'Test-eNB-OAI-UE-FDD-Band7')
}
}
post {
always {
script {
finalizeSlaveJob('eNB-UE-CI-MONO-FDD-Band7-B200')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
}
}
}
}
}
post {
always {
script {
dir ('archives') {
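// Create the zip only if at least one simulator variant produced test logs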
sh "if [ -d basic_sim/test ] || [ -d phy_sim/test ] || [ -d l2_sim/test ]; then zip -r -qq vm_tests_logs.zip */test ; fi"
}
if(fileExists('archives/vm_tests_logs.zip')) {
archiveArtifacts artifacts: 'archives/vm_tests_logs.zip'
if ("MERGE".equals(env.gitlabActionType)) {
sh "./ci-scripts/oai-ci-vm-tool report-test --workspace $WORKSPACE --git-url ${GIT_URL} --job-name ${JOB_NAME} --build-id ${BUILD_ID} --trigger merge-request --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
} else {
sh "./ci-scripts/oai-ci-vm-tool report-test --workspace $WORKSPACE --git-url ${GIT_URL} --job-name ${JOB_NAME} --build-id ${BUILD_ID} --trigger push --branch ${GIT_BRANCH} --commit ${GIT_COMMIT}"
}
if(fileExists('test_simulator_results.html')) {
sh "sed -i -e 's#Build-ID: ${BUILD_ID}#Build-ID: <a href=\"${BUILD_URL}\">${BUILD_ID}</a>#' -e 's#TEMPLATE_BUILD_TIME#${JOB_TIMESTAMP}#' test_simulator_results.html"
archiveArtifacts artifacts: 'test_simulator_results.html'
}
}
}
}
}
}
stage ("Destroy all Virtual Machines") {
steps {
sh "./ci-scripts/oai-ci-vm-tool destroy --job-name ${JOB_NAME} --build-id ${BUILD_ID}"
}
}
}
post {
always {
script {
// The "Destroy all Virtual Machines" stage may not have run if a previous stage failed
sh "./ci-scripts/oai-ci-vm-tool destroy --job-name ${JOB_NAME} --build-id ${BUILD_ID}"
emailext attachmentsPattern: '*results*.html',
body: '''Hi,
Here are attached HTML report files for $PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!
Regards,
OAI CI Team''',
replyTo: 'no-reply@openairinterface.org',
subject: '$PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!',
to: gitCommitAuthorEmailAddr
if (fileExists('.git/CI_COMMIT_MSG')) {
sh "rm -f .git/CI_COMMIT_MSG"
}
}
}
success {
script {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")"
if ("MERGE".equals(env.gitlabActionType)) {
echo "This is a MERGE event"
addGitLabMRComment comment: message
def message2 = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ") -- MergeRequest #" + env.gitlabMergeRequestIid + " (" + env.gitlabMergeRequestTitle + ")"
sendSocialMediaMessage('ci-enb', 'good', message2)
} else {
sendSocialMediaMessage('ci-enb', 'good', message)
}
}
}
failure {
script {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")"
if ("MERGE".equals(env.gitlabActionType)) {
echo "This is a MERGE event"
addGitLabMRComment comment: message
def message2 = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ") -- MergeRequest #" + env.gitlabMergeRequestIid + " (" + env.gitlabMergeRequestTitle + ")"
sendSocialMediaMessage('ci-enb', 'danger', message2)
} else {
sendSocialMediaMessage('ci-enb', 'danger', message)
}
}
}
}
}
// ---- Slave Job functions
def triggerSlaveJob (jobName, gitlabStatusName) {
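// For a merge request, pass the source branch/commit plus the target branch so the slave job
// can reproduce the merge; otherwise pass the plain branch and commit of the push.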
if ("MERGE".equals(env.gitlabActionType)) {
gitlabCommitStatus(name: gitlabStatusName) {
build job: jobName,
parameters: [
string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
booleanParam(name: 'eNB_mergeRequest', value: true),
string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
]
}
} else {
gitlabCommitStatus(name: gitlabStatusName) {
build job: jobName,
parameters: [
string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
string(name: 'eNB_Branch', value: String.valueOf(GIT_BRANCH)),
string(name: 'eNB_CommitID', value: String.valueOf(GIT_COMMIT)),
booleanParam(name: 'eNB_mergeRequest', value: false)
]
}
}
}
def triggerSlaveJobNoGitLab (jobName) {
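// Same parameterization as triggerSlaveJob, but without a gitlabCommitStatus wrapper:
// this slave job does not report a commit status back to GitLab.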
if ("MERGE".equals(env.gitlabActionType)) {
build job: jobName,
parameters: [
string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
booleanParam(name: 'eNB_mergeRequest', value: true),
string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
]
} else {
build job: jobName,
parameters: [
string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
string(name: 'eNB_Branch', value: String.valueOf(GIT_BRANCH)),
string(name: 'eNB_CommitID', value: String.valueOf(GIT_COMMIT)),
booleanParam(name: 'eNB_mergeRequest', value: false)
]
}
}
def finalizeSlaveJob(jobName) {
// In case of any non-success, we retrieve the HTML report of the last completed
// slave job. The only drawback is that we may retrieve the HTML report of a previous build.
fileName = "test_results-${jobName}.html"
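// The file name matches the report archived by the slave job in its own Log Collection stage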
if (!fileExists(fileName)) {
copyArtifacts(projectName: jobName,
filter: 'test_results*.html',
selector: lastCompleted())
if (fileExists(fileName)) {
sh "sed -i -e 's#TEMPLATE_BUILD_TIME#${JOB_TIMESTAMP}#' ${fileName}"
archiveArtifacts artifacts: fileName
}
}
}
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// Location of the executor node
def pythonExecutor = params.pythonExecutor
// Location of the test XML file to be run
def testXMLFile = params.pythonTestXmlFile
def mainPythonAllXmlFiles = ""
def buildStageStatus = true
// Name of the test stage
def testStageName = params.pipelineTestStageName
// Names of the resources to lock
def lockResources = []
if (params.LockResources != null && params.LockResources.trim().length() > 0)
params.LockResources.trim().split(",").each{lockResources += [resource: it.trim()]}
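// Illustrative example (hypothetical values): LockResources = "CI-Bench-A,CI-Bench-B" yields
// lockResources = [[resource: 'CI-Bench-A'], [resource: 'CI-Bench-B']],
// the format expected by the lock(extra: ...) option below.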
// Global Parameters. Normally they should be populated when the master job
// triggers the slave job with parameters
def eNB_Repository
def eNB_Branch
def eNB_CommitID
def eNB_AllowMergeRequestProcess = false
def eNB_TargetBranch
// Flags
def scmEvent = false
def upstreamEvent = false
//-------------------------------------------------------------------------------
// Pipeline start
pipeline {
agent {
label pythonExecutor
}
options {
timestamps()
ansiColor('xterm')
lock(extra: lockResources)
}
stages {
stage("Build Init") {
steps {
// update the build name and description
buildName "${params.eNB_MR}"
buildDescription "Branch : ${params.eNB_Branch}"
}
}
stage ('Verify Parameters') {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
def allParametersPresent = true
// It is already too late to check it
if (params.pythonExecutor != null) {
echo "eNB CI executor node : ${pythonExecutor}"
}
// If not present, pick a default Stage Name
if (params.pipelineTestStageName == null) {
// picking default
testStageName = 'Template Test Stage'
}
if (params.LockResources == null) {
echo "no LockResources given"
allParametersPresent = false
}
if (params.eNB_IPAddress == null) {
allParametersPresent = false
}
if (params.eNB_SourceCodePath == null) {
allParametersPresent = false
}
if (params.eNB_Credentials == null) {
allParametersPresent = false
}
if (params.eNB1_IPAddress == null) {
allParametersPresent = false
}
if (params.eNB1_SourceCodePath == null) {
allParametersPresent = false
}
if (params.eNB1_Credentials == null) {
allParametersPresent = false
}
// the following 4 parameters should be pushed by the master trigger
// if not present, take the job GIT variables (used for developing)
if (params.eNB_Repository == null) {
eNB_Repository = env.GIT_URL
} else {
eNB_Repository = params.eNB_Repository
}
echo "eNB_Repository : ${eNB_Repository}"
if (params.eNB_Branch == null) {
eNB_Branch = env.GIT_BRANCH
} else {
eNB_Branch = params.eNB_Branch
}
echo "eNB_Branch : ${eNB_Branch}"
if (params.eNB_CommitID == null) {
eNB_CommitID = env.GIT_COMMIT
} else {
eNB_CommitID = params.eNB_CommitID
}
echo "eNB_CommitID : ${eNB_CommitID}"
if (params.eNB_mergeRequest != null) {
eNB_AllowMergeRequestProcess = params.eNB_mergeRequest
if (eNB_AllowMergeRequestProcess) {
if (params.eNB_TargetBranch != null) {
eNB_TargetBranch = params.eNB_TargetBranch
} else {
eNB_TargetBranch = 'develop'
}
echo "eNB_TargetBranch : ${eNB_TargetBranch}"
}
}
if (params.EPC_IPAddress == null) {
allParametersPresent = false
}
if (params.EPC_Type == null) {
allParametersPresent = false
}
if (params.EPC_SourceCodePath == null) {
allParametersPresent = false
}
if (params.EPC_Credentials == null) {
allParametersPresent = false
}
if (params.OC_Credentials == null) {
allParametersPresent = false
}
if (params.OC_ProjectName == null) {
allParametersPresent = false
}
if (allParametersPresent) {
echo "All parameters are present"
if (eNB_AllowMergeRequestProcess) {
sh "git fetch"
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${eNB_Branch} --src-commit ${eNB_CommitID} --target-branch ${eNB_TargetBranch} --target-commit latest"
} else {
sh "git fetch"
sh "git checkout -f ${eNB_CommitID}"
}
} else {
echo "Some parameters are missing"
sh "./ci-scripts/fail.sh"
}
}
}
}
stage ("Deploy and Test") {
steps {
script {
dir ('ci-scripts') {
echo "\u2705 \u001B[32m${testStageName}\u001B[0m"
// If not present, pick a default XML file
if (params.pythonTestXmlFile == null) {
// picking default
testXMLFile = 'xml_files/enb_usrpB210_band7_50PRB.xml'
echo "Test XML file(default): ${testXMLFile}"
mainPythonAllXmlFiles += "--XMLTestFile=" + testXMLFile + " "
} else {
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
mainPythonAllXmlFiles += "--XMLTestFile=" + xmlFile + " "
echo "Test XML file : ${xmlFile}"
} else {
echo "Test XML file ${xmlFile}: no such file"
}
}
}
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB1_Credentials}", usernameVariable: 'eNB1_Username', passwordVariable: 'eNB1_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password']
]) {
sh "python3 main.py --mode=InitiateHtml --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} ${mainPythonAllXmlFiles}"
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
try {
timeout (time: 60, unit: 'MINUTES') {
sh "python3 main.py --mode=TesteNB --eNBIPAddress=${params.eNB_IPAddress} --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --EPCIPAddress=${params.EPC_IPAddress} --EPCType=${params.EPC_Type} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --XMLTestFile=${xmlFile} --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName} --eNB1IPAddress=${params.eNB1_IPAddress} --eNB1UserName=${eNB1_Username} --eNB1Password=${eNB1_Password} --eNB1SourceCodePath=${params.eNB1_SourceCodePath}"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
buildStageStatus = false
}
}
}
sh "python3 main.py --mode=FinalizeHtml --finalStatus=${buildStageStatus} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
}
}
}
}
}
stage ("Log Collection") {
steps {
script {
dir ('ci-scripts') {
// Zipping all archived log files
sh "mkdir test_logs"
sh "mv *.log* test_logs || true"
// Zip all log files matching cmake_targets/{*.log*,log/*} into test_logs_XXXX.zip
if (fileExists('../cmake_targets/log')) {
sh "mv ../cmake_targets/log/* test_logs || true"
}
sh "zip -r -qq test_logs_${env.BUILD_ID}.zip *test_log*/"
sh "rm -rf *test_log*/"
if (fileExists("test_logs_${env.BUILD_ID}.zip")) {
archiveArtifacts artifacts: "test_logs_${env.BUILD_ID}.zip"
}
if (fileExists("test_logs_CN.zip")){
sh "mv test_logs_CN.zip test_logs_CN_${env.BUILD_ID}.zip"
archiveArtifacts artifacts: "test_logs_CN_${env.BUILD_ID}.zip"
}
if (fileExists("test_results.html")) {
sh "mv test_results.html test_results-${env.JOB_NAME}.html"
sh "sed -i -e 's#TEMPLATE_JOB_NAME#${JOB_NAME}#' -e 's@build #TEMPLATE_BUILD_ID@build #${BUILD_ID}@' -e 's#Build-ID: TEMPLATE_BUILD_ID#Build-ID: <a href=\"${BUILD_URL}\">${BUILD_ID}</a>#' -e 's#TEMPLATE_STAGE_NAME#${testStageName}#' test_results-${JOB_NAME}.html"
archiveArtifacts artifacts: "test_results-${env.JOB_NAME}.html"
}
}
}
}
}
}
}
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// Location of the executor node
def pythonExecutor = params.pythonExecutor
// Location of the test XML file to be run
def testXMLFile = params.pythonTestXmlFile
def mainPythonAllXmlFiles = ""
def buildStageStatus = true
// Name of the test stage
def testStageName = params.pipelineTestStageName
// Names of the resources to lock
def lockResources = []
if (params.LockResources != null && params.LockResources.trim().length() > 0)
params.LockResources.trim().split(",").each{lockResources += [resource: it.trim()]}
// Global Parameters. Normally they should be populated when the master job
// triggers the slave job with parameters
def eNB_Repository
def eNB_Branch
def eNB_CommitID
def eNB_AllowMergeRequestProcess = false
def eNB_TargetBranch
// Flags
def scmEvent = false
def upstreamEvent = false
//-------------------------------------------------------------------------------
// Pipeline start
pipeline {
agent {
label pythonExecutor
}
options {
timestamps()
ansiColor('xterm')
lock(extra: lockResources)
}
stages {
stage("Build Init") {
steps {
// update the build name and description
buildName "${params.eNB_MR}"
buildDescription "Branch : ${params.eNB_Branch}"
}
}
stage ('Verify Parameters') {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
def allParametersPresent = true
// It is already too late to check it
if (params.pythonExecutor != null) {
echo "eNB CI executor node : ${pythonExecutor}"
}
// If not present, pick a default Stage Name
if (params.pipelineTestStageName == null) {
// picking default
testStageName = 'Template Test Stage'
}
if (params.LockResources == null) {
echo "no LockResources given"
allParametersPresent = false
}
if (params.eNB_IPAddress == null) {
allParametersPresent = false
}
if (params.eNB_SourceCodePath == null) {
allParametersPresent = false
}
if (params.eNB_Credentials == null) {
allParametersPresent = false
}
// the following 4 parameters should be pushed by the master trigger
// if not present, take the job GIT variables (used for developing)
if (params.eNB_Repository == null) {
eNB_Repository = env.GIT_URL
} else {
eNB_Repository = params.eNB_Repository
}
echo "eNB_Repository : ${eNB_Repository}"
if (params.eNB_Branch == null) {
eNB_Branch = env.GIT_BRANCH
} else {
eNB_Branch = params.eNB_Branch
}
echo "eNB_Branch : ${eNB_Branch}"
if (params.eNB_CommitID == null) {
eNB_CommitID = env.GIT_COMMIT
} else {
eNB_CommitID = params.eNB_CommitID
}
echo "eNB_CommitID : ${eNB_CommitID}"
if (params.eNB_mergeRequest != null) {
eNB_AllowMergeRequestProcess = params.eNB_mergeRequest
if (eNB_AllowMergeRequestProcess) {
if (params.eNB_TargetBranch != null) {
eNB_TargetBranch = params.eNB_TargetBranch
} else {
eNB_TargetBranch = 'develop'
}
echo "eNB_TargetBranch : ${eNB_TargetBranch}"
}
}
if (params.EPC_IPAddress == null) {
allParametersPresent = false
}
if (params.EPC_Type == null) {
allParametersPresent = false
}
if (params.EPC_SourceCodePath == null) {
allParametersPresent = false
}
if (params.EPC_Credentials == null) {
allParametersPresent = false
}
if (params.OC_Credentials == null) {
allParametersPresent = false
}
if (params.OC_ProjectName == null) {
allParametersPresent = false
}
if (allParametersPresent) {
echo "All parameters are present"
if (eNB_AllowMergeRequestProcess) {
sh "git fetch"
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${eNB_Branch} --src-commit ${eNB_CommitID} --target-branch ${eNB_TargetBranch} --target-commit latest"
} else {
sh "git fetch"
sh "git checkout -f ${eNB_CommitID}"
}
} else {
echo "Some parameters are missing"
sh "./ci-scripts/fail.sh"
}
}
}
}
stage ("Deploy and Test") {
steps {
script {
dir ('ci-scripts') {
echo "\u2705 \u001B[32m${testStageName}\u001B[0m"
// If not present, pick a default XML file
if (params.pythonTestXmlFile == null) {
// picking default
testXMLFile = 'xml_files/enb_usrpB210_band7_50PRB.xml'
echo "Test XML file(default): ${testXMLFile}"
mainPythonAllXmlFiles += "--XMLTestFile=" + testXMLFile + " "
} else {
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
mainPythonAllXmlFiles += "--XMLTestFile=" + xmlFile + " "
echo "Test XML file : ${xmlFile}"
} else {
echo "Test XML file ${xmlFile}: no such file"
}
}
}
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.OC_Credentials}", usernameVariable: 'OC_Username', passwordVariable: 'OC_Password']
]) {
sh "python3 main.py --mode=InitiateHtml --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} ${mainPythonAllXmlFiles}"
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
try {
timeout (time: 60, unit: 'MINUTES') {
sh "python3 main.py --mode=TesteNB --eNBIPAddress=${params.eNB_IPAddress} --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --EPCIPAddress=${params.EPC_IPAddress} --EPCType=${params.EPC_Type} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --XMLTestFile=${xmlFile} --OCUserName=${OC_Username} --OCPassword=${OC_Password} --OCProjectName=${OC_ProjectName}"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
buildStageStatus = false
}
}
}
sh "python3 main.py --mode=FinalizeHtml --finalStatus=${buildStageStatus} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
}
}
}
}
}
stage ("Log Collection") {
steps {
script {
dir ('ci-scripts') {
// Zipping all archived log files
sh "mkdir test_logs"
sh "mv *.log* test_logs || true"
// Zip all log files matching cmake_targets/{*.log*,log/*} into test_logs_XXXX.zip
if (fileExists('../cmake_targets/log')) {
sh "mv ../cmake_targets/log/* test_logs || true"
}
sh "zip -r -qq test_logs_${env.BUILD_ID}.zip *test_log*/"
sh "rm -rf *test_log*/"
if (fileExists("test_logs_${env.BUILD_ID}.zip")) {
archiveArtifacts artifacts: "test_logs_${env.BUILD_ID}.zip"
}
if (fileExists("test_logs_CN.zip")){
sh "mv test_logs_CN.zip test_logs_CN_${env.BUILD_ID}.zip"
archiveArtifacts artifacts: "test_logs_CN_${env.BUILD_ID}.zip"
}
if (fileExists("test_results.html")) {
sh "mv test_results.html test_results-${env.JOB_NAME}.html"
sh "sed -i -e 's#TEMPLATE_JOB_NAME#${JOB_NAME}#' -e 's@build #TEMPLATE_BUILD_ID@build #${BUILD_ID}@' -e 's#Build-ID: TEMPLATE_BUILD_ID#Build-ID: <a href=\"${BUILD_URL}\">${BUILD_ID}</a>#' -e 's#TEMPLATE_STAGE_NAME#${testStageName}#' test_results-${JOB_NAME}.html"
archiveArtifacts artifacts: "test_results-${env.JOB_NAME}.html"
}
}
}
}
}
}
}
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// The python executor node shall be in the same subnet as the other servers
def nodeExecutor = params.nodeExecutor
// Names of the resources to lock
def lockResources = []
if (params.LockResources != null && params.LockResources.trim().length() > 0)
params.LockResources.trim().split(",").each{lockResources += [resource: it.trim()]}
// Docker Hub account to push to
def DH_Account = "oaisoftwarealliance"
pipeline {
agent {
label nodeExecutor
}
options {
ansiColor('xterm')
lock(extra: lockResources)
}
stages {
stage ("Verify Parameters") {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
def allParametersPresent = true
// It is already too late to check it
if (params.nodeExecutor != null) {
echo "Docker Push executor node : ${nodeExecutor}"
}
if (params.LockResources == null) {
echo "no LockResources given"
allParametersPresent = false
}
}
}
}
stage ("Push to DockerHub") {
steps {
script {
WEEK_REF = sh returnStdout: true, script: 'date +"%Y.w%V"'
WEEK_REF = WEEK_REF.trim()
WEEK_TAG = sh returnStdout: true, script: 'python3 ./ci-scripts/provideUniqueImageTag.py --start_tag ' + WEEK_REF
WEEK_TAG = WEEK_TAG.trim()
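// Illustrative: WEEK_REF looks like "2024.w09" (year.week number);
// provideUniqueImageTag.py presumably derives from it a tag that does not collide with an existing one.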
if ((params.forceTag != null) && (params.tagToUse != null)) {
if (params.forceTag) {
WEEK_TAG = params.tagToUse
echo "Forced Tag is ${WEEK_TAG}"
}
}
WEEK_SHA = sh returnStdout: true, script: 'git log -n1 --pretty=format:"%h" origin/develop | cut -c 1-8'
WEEK_SHA = WEEK_SHA.trim()
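// Short (8-character) SHA of the latest commit on origin/develop, matching the develop-<sha> image tags pulled below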
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.DH_Credentials}", usernameVariable: 'DH_Username', passwordVariable: 'DH_Password']
]) {
def listOfImages = ["oai-enb", "oai-gnb", "oai-lte-ue", "oai-nr-ue", "oai-nr-cuup", "oai-gnb-fhi72"]
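// Each image is pulled from the private registry, re-tagged for the Docker Hub account, pushed, then removed locally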
// Logging in on both registries
sh "docker login -u ${DH_Username} -p ${DH_Password} > /dev/null 2>&1"
sh "docker login -u oaicicd -p oaicicd porcepix.sboai.cs.eurecom.fr > /dev/null 2>&1"
listOfImages.eachWithIndex { item, iindex ->
sh "docker pull --quiet porcepix.sboai.cs.eurecom.fr/${item}:develop-${WEEK_SHA}"
sh "docker image tag porcepix.sboai.cs.eurecom.fr/${item}:develop-${WEEK_SHA} ${DH_Account}/${item}:develop"
sh "docker image tag porcepix.sboai.cs.eurecom.fr/${item}:develop-${WEEK_SHA} ${DH_Account}/${item}:${WEEK_TAG}"
sh "docker push --quiet ${DH_Account}/${item}:${WEEK_TAG}"
sh "docker push --quiet ${DH_Account}/${item}:develop"
sh "docker rmi ${DH_Account}/${item}:${WEEK_TAG} ${DH_Account}/${item}:develop porcepix.sboai.cs.eurecom.fr/${item}:develop-${WEEK_SHA}"
}
// Proxy is not following the same pattern.
sh "docker image tag proxy:develop ${DH_Account}/proxy:develop"
sh "docker image tag proxy:develop ${DH_Account}/proxy:${WEEK_TAG}"
sh "docker push --quiet ${DH_Account}/proxy:develop"
sh "docker push --quiet ${DH_Account}/proxy:${WEEK_TAG}"
sh "docker rmi ${DH_Account}/proxy:develop ${DH_Account}/proxy:${WEEK_TAG}"
// Logging out on both registries
sh "docker logout porcepix.sboai.cs.eurecom.fr > /dev/null 2>&1"
sh "docker logout > /dev/null 2>&1"
}
}
}
}
}
post {
always {
script {
sh "docker logout porcepix.sboai.cs.eurecom.fr > /dev/null 2>&1"
sh "docker logout > /dev/null 2>&1"
echo "End of Registry Push"
}
}
}
}
@@ -33,18 +33,14 @@ def buildStageStatus = true
// Name of the test stage
def testStageName = params.pipelineTestStageName
// Name of the phone resource
def ciSmartPhoneResource = params.smartphonesResource
// Name of the phone resource
def oaiUEResource = params.oaiUEResource
def lockResources = []
if (params.LockResources != null && params.LockResources.trim().length() > 0)
params.LockResources.trim().split(",").each{lockResources += [resource: it.trim()]}
// Terminate Status
def termENB = 0
def termOAIUE = 1
def termStatusArray = new Boolean[termOAIUE + 1]
def termStatusArray = new Boolean[termENB + 1]
termStatusArray[termENB] = false
termStatusArray[termOAIUE] = false
// Global Parameters. Normally they should be populated when the master job
// triggers the slave job with parameters
@@ -54,17 +50,27 @@ def eNB_CommitID
def eNB_AllowMergeRequestProcess = false
def eNB_TargetBranch
def doEpcLogCollection = true
pipeline {
agent {
label pythonExecutor
}
options {
disableConcurrentBuilds()
ansiColor('xterm')
lock(extra: [[resource: ciSmartPhoneResource]], resource: ciSmartPhoneResource)
lock(extra: lockResources)
}
stages {
stage("Build Init") {
steps {
// update the build name and description
buildName "${params.eNB_MR}"
buildDescription "Branch : ${params.eNB_Branch}"
}
}
stage ("Verify Parameters") {
steps {
script {
@@ -75,32 +81,15 @@ pipeline {
if (params.pythonExecutor != null) {
echo "eNB CI executor node : ${pythonExecutor}"
}
// If not present picking a default XML file
if (params.pythonTestXmlFile == null) {
// picking default
testXMLFile = 'xml_files/enb_usrpB210_band7_50PRB.xml'
echo "Test XML file(default): ${testXMLFile}"
mainPythonAllXmlFiles += "--XMLTestFile=" + testXMLFile + " "
} else {
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists("ci-scripts/" + xmlFile)) {
mainPythonAllXmlFiles += "--XMLTestFile=" + xmlFile + " "
echo "Test XML file : ${xmlFile}"
}
}
}
// If not present picking a default Stage Name
if (params.pipelineTestStageName == null) {
// picking default
testStageName = 'Template Test Stage'
}
if (params.smartphonesResource == null) {
allParametersPresent = false
}
if (params.oaiUEResource == null) {
allParametersPresent = false
if (params.LockResources == null) {
echo "no LockResources given"
allParametersPresent = false
}
if (params.eNB_IPAddress == null) {
allParametersPresent = false
@@ -154,6 +143,11 @@ pipeline {
if (params.EPC_IPAddress == null) {
allParametersPresent = false
} else {
if (params.EPC_IPAddress == "none") {
doEpcLogCollection = false
echo "No EPC collections (ping/iperf/hss/mme/spgw)"
}
}
if (params.EPC_Type == null) {
allParametersPresent = false
@@ -165,13 +159,6 @@ pipeline {
allParametersPresent = false
}
if (params.ADB_IPAddress == null) {
allParametersPresent = false
}
if (params.ADB_Credentials == null) {
allParametersPresent = false
}
if (allParametersPresent) {
echo "All parameters are present"
if (eNB_AllowMergeRequestProcess) {
@@ -182,8 +169,8 @@ pipeline {
sh "git checkout -f ${eNB_CommitID}"
}
} else {
echo "Some parameters are missing"
sh "./ci-scripts/fail.sh"
currentBuild.result = 'ABORTED'
error('Stopping early because some parameters are missing')
}
}
}
@@ -193,18 +180,36 @@ pipeline {
script {
dir ('ci-scripts') {
echo "\u2705 \u001B[32m${testStageName}\u001B[0m"
// If not present picking a default XML file
if (params.pythonTestXmlFile == null) {
// picking default
testXMLFile = 'xml_files/enb_usrpB210_band7_50PRB.xml'
echo "Test XML file(default): ${testXMLFile}"
mainPythonAllXmlFiles += "--XMLTestFile=" + testXMLFile + " "
} else {
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
mainPythonAllXmlFiles += "--XMLTestFile=" + xmlFile + " "
echo "Test XML file : ${xmlFile}"
} else {
echo "Test XML file ${xmlFile}: no such file"
}
}
}
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.UE_Credentials}", usernameVariable: 'UE_Username', passwordVariable: 'UE_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.ADB_Credentials}", usernameVariable: 'ADB_Username', passwordVariable: 'ADB_Password']
]) {
sh "python3 main.py --mode=InitiateHtml --eNBRepository=${eNB_Repository} --eNBBranch=${eNB_Branch} --eNBCommitID=${eNB_CommitID} --eNB_AllowMerge=${eNB_AllowMergeRequestProcess} --eNBTargetBranch=${eNB_TargetBranch} --ADBIPAddress=${params.ADB_IPAddress} --ADBUserName=${ADB_Username} --ADBPassword=${ADB_Password} ${mainPythonAllXmlFiles}"
sh "python3 main.py --mode=InitiateHtml --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} ${mainPythonAllXmlFiles}"
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
try {
sh "python3 main.py --mode=TesteNB --eNBRepository=${eNB_Repository} --eNBBranch=${eNB_Branch} --eNBCommitID=${eNB_CommitID} --eNB_AllowMerge=${eNB_AllowMergeRequestProcess} --eNBTargetBranch=${eNB_TargetBranch} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --UEIPAddress=${params.UE_IPAddress} --UEUserName=${UE_Username} --UEPassword=${UE_Password} --UESourceCodePath=${params.UE_SourceCodePath} --EPCIPAddress=${params.EPC_IPAddress} --EPCType=${params.EPC_Type} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --ADBIPAddress=${params.ADB_IPAddress} --ADBUserName=${ADB_Username} --ADBPassword=${ADB_Password} --XMLTestFile=${xmlFile}"
timeout (time: 60, unit: 'MINUTES') {
sh "python3 main.py --mode=TesteNB --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --UEIPAddress=${params.UE_IPAddress} --UEUserName=${UE_Username} --UEPassword=${UE_Password} --UESourceCodePath=${params.UE_SourceCodePath} --EPCIPAddress=${params.EPC_IPAddress} --EPCType=${params.EPC_Type} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --XMLTestFile=${xmlFile}"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
buildStageStatus = false
@@ -236,34 +241,53 @@ pipeline {
}
}
}
stage('Terminate OAI-UE') {
}
}
stage('Log Collection') {
parallel {
stage('Log Collection (eNB - Build)') {
steps {
echo '\u2705 \u001B[32mTerminate OAI-UE\u001B[0m'
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.UE_Credentials}", usernameVariable: 'UE_Username', passwordVariable: 'UE_Password']
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password']
]) {
sh "python3 ci-scripts/main.py --mode=TerminateOAIUE --UEIPAddress=${params.UE_IPAddress} --UEUserName=${UE_Username} --UEPassword=${UE_Password}"
echo '\u2705 \u001B[32mLog Collection (eNB - Build)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectBuild --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath}"
echo '\u2705 \u001B[32mLog Transfer (eNB - Build)\u001B[0m'
sh "sshpass -p \'${eNB_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${eNB_Username}@${params.eNB_IPAddress}:${eNB_SourceCodePath}/cmake_targets/build.log.zip ./build.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("build.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "build.log.${env.BUILD_ID}.zip"
}
}
}
post {
success {
script {
termStatusArray[termOAIUE] = true
}
stage('Log Collection (OAI UE - Build)') {
steps {
echo '\u2705 \u001B[32mLog Collection (OAI UE - Build)\u001B[0m'
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.UE_Credentials}", usernameVariable: 'UE_Username', passwordVariable: 'UE_Password']
]) {
sh "python3 ci-scripts/main.py --mode=LogCollectBuild --UEIPAddress=${params.UE_IPAddress} --UEUserName=${UE_Username} --UEPassword=${UE_Password} --UESourceCodePath=${params.UE_SourceCodePath}"
echo '\u2705 \u001B[32mLog Transfer (UE - Build)\u001B[0m'
sh "sshpass -p \'${UE_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${UE_Username}@${params.UE_IPAddress}:${UE_SourceCodePath}/cmake_targets/build.log.zip ./build.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("build.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "build.log.${env.BUILD_ID}.zip"
}
}
}
}
}
}
stage('Log Collection') {
parallel {
stage('Log Collection (eNB - Run)') {
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (eNB - Run)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollecteNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath}"
sh "python3 ci-scripts/main.py --mode=LogCollecteNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --BuildId=${env.BUILD_ID}"
echo '\u2705 \u001B[32mLog Transfer (eNB - Run)\u001B[0m'
sh "sshpass -p \'${eNB_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${eNB_Username}@${params.eNB_IPAddress}:${eNB_SourceCodePath}/cmake_targets/enb.log.zip ./enb.log.${env.BUILD_ID}.zip || true"
@@ -299,6 +323,9 @@ pipeline {
}
}
stage('Log Collection (Ping)') {
when {
expression {doEpcLogCollection}
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -317,6 +344,9 @@ pipeline {
}
}
stage('Log Collection (Iperf)') {
when {
expression {doEpcLogCollection}
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -335,6 +365,9 @@ pipeline {
}
}
stage('Log Collection (SPGW)') {
when {
expression {doEpcLogCollection}
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -353,6 +386,9 @@ pipeline {
}
}
stage('Log Collection (MME)') {
when {
expression {doEpcLogCollection}
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -371,6 +407,9 @@ pipeline {
}
}
stage('Log Collection (HSS)') {
when {
expression {doEpcLogCollection}
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -417,13 +456,6 @@ pipeline {
sh "python3 ci-scripts/main.py --mode=TerminateeNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
}
}
if (!termStatusArray[termOAIUE]) {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.UE_Credentials}", usernameVariable: 'UE_Username', passwordVariable: 'UE_Password']
]) {
sh "python3 ci-scripts/main.py --mode=TerminateOAIUE --UEIPAddress=${params.UE_IPAddress} --UEUserName=${UE_Username} --UEPassword=${UE_Password}"
}
}
}
}
}
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// Template Jenkins Declarative Pipeline script to run Test w/ RF HW
// The python executor node shall be in the same subnet as the other servers
def pythonExecutor = params.pythonExecutor
// Location of the test XML file to be run
def testXMLFile = params.pythonTestXmlFile
def mainPythonAllXmlFiles = ""
def buildStageStatus = true
// Name of the test stage
def testStageName = params.pipelineTestStageName
def lockResources = []
if (params.LockResources != null && params.LockResources.trim().length() > 0)
params.LockResources.trim().split(",").each{lockResources += [resource: it.trim()]}
// Global Parameters. Normally they should be populated when the master job
// triggers the slave job with parameters
def eNB_Repository
def eNB_Branch
def eNB_CommitID
def eNB_AllowMergeRequestProcess = false
def eNB_TargetBranch
pipeline {
agent {
label pythonExecutor
}
options {
ansiColor('xterm')
lock(extra: lockResources)
}
stages {
stage("Build Init") {
steps {
// update the build name and description
buildName "${params.eNB_MR}"
buildDescription "Branch : ${params.eNB_Branch}"
}
}
stage ("Verify Parameters") {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
def allParametersPresent = true
// It is already too late to check it
if (params.pythonExecutor != null) {
echo "eNB CI executor node : ${pythonExecutor}"
}
// If not present, pick a default Stage Name
if (params.pipelineTestStageName == null) {
// picking default
testStageName = 'Template Test Stage'
}
if (params.LockResources == null) {
echo "no LockResources given"
allParametersPresent = false
}
// 1st eNB parameters
if (params.eNB_IPAddress == null) {
allParametersPresent = false
}
if (params.eNB_SourceCodePath == null) {
allParametersPresent = false
}
if (params.eNB_Credentials == null) {
allParametersPresent = false
}
// 2nd eNB parameters
if (params.eNB1_IPAddress == null) {
allParametersPresent = false
}
if (params.eNB1_SourceCodePath == null) {
allParametersPresent = false
}
if (params.eNB1_Credentials == null) {
allParametersPresent = false
}
// 3rd eNB parameters
if (params.eNB2_IPAddress == null) {
allParametersPresent = false
}
if (params.eNB2_SourceCodePath == null) {
allParametersPresent = false
}
if (params.eNB2_Credentials == null) {
allParametersPresent = false
}
// the following 4 parameters should be pushed by the master trigger
// if not present, take the job GIT variables (used for developing)
if (params.eNB_Repository == null) {
eNB_Repository = env.GIT_URL
} else {
eNB_Repository = params.eNB_Repository
}
echo "eNB_Repository : ${eNB_Repository}"
if (params.eNB_Branch == null) {
eNB_Branch = env.GIT_BRANCH
} else {
eNB_Branch = params.eNB_Branch
}
echo "eNB_Branch : ${eNB_Branch}"
if (params.eNB_CommitID == null) {
eNB_CommitID = env.GIT_COMMIT
} else {
eNB_CommitID = params.eNB_CommitID
}
echo "eNB_CommitID : ${eNB_CommitID}"
if (params.eNB_mergeRequest != null) {
eNB_AllowMergeRequestProcess = params.eNB_mergeRequest
if (eNB_AllowMergeRequestProcess) {
if (params.eNB_TargetBranch != null) {
eNB_TargetBranch = params.eNB_TargetBranch
} else {
eNB_TargetBranch = 'develop'
}
echo "eNB_TargetBranch : ${eNB_TargetBranch}"
}
}
if (params.EPC_IPAddress == null) {
allParametersPresent = false
}
if (params.EPC_Type == null) {
allParametersPresent = false
}
if (params.EPC_SourceCodePath == null) {
allParametersPresent = false
}
if (params.EPC_Credentials == null) {
allParametersPresent = false
}
if (allParametersPresent) {
echo "All parameters are present"
if (eNB_AllowMergeRequestProcess) {
sh "git fetch"
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${eNB_Branch} --src-commit ${eNB_CommitID} --target-branch ${eNB_TargetBranch} --target-commit latest"
} else {
sh "git fetch"
sh "git checkout -f ${eNB_CommitID}"
}
} else {
echo "Some parameters are missing"
sh "./ci-scripts/fail.sh"
}
}
}
}
stage ("Build and Test") {
steps {
script {
dir ('ci-scripts') {
echo "\u2705 \u001B[32m${testStageName}\u001B[0m"
// If not present, pick a default XML file
if (params.pythonTestXmlFile == null) {
// picking default
testXMLFile = 'xml_files/enb_usrpB210_band7_50PRB.xml'
echo "Test XML file(default): ${testXMLFile}"
mainPythonAllXmlFiles += "--XMLTestFile=" + testXMLFile + " "
} else {
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
mainPythonAllXmlFiles += "--XMLTestFile=" + xmlFile + " "
echo "Test XML file : ${xmlFile}"
} else {
echo "Test XML file ${xmlFile}: no such file"
}
}
}
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB1_Credentials}", usernameVariable: 'eNB1_Username', passwordVariable: 'eNB1_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB2_Credentials}", usernameVariable: 'eNB2_Username', passwordVariable: 'eNB2_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password'],
]) {
sh "python3 main.py --mode=InitiateHtml --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} ${mainPythonAllXmlFiles}"
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
try {
timeout (time: 60, unit: 'MINUTES') {
sh "python3 main.py --mode=TesteNB --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --eNB1IPAddress=${params.eNB1_IPAddress} --eNB1UserName=${eNB1_Username} --eNB1Password=${eNB1_Password} --eNB1SourceCodePath=${params.eNB1_SourceCodePath} --eNB2IPAddress=${params.eNB2_IPAddress} --eNB2UserName=${eNB2_Username} --eNB2Password=${eNB2_Password} --eNB2SourceCodePath=${params.eNB2_SourceCodePath} --EPCIPAddress=${params.EPC_IPAddress} --EPCType=${params.EPC_Type} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --XMLTestFile=${xmlFile}"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
buildStageStatus = false
}
}
}
sh "python3 main.py --mode=FinalizeHtml --finalStatus=${buildStageStatus} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
}
}
}
}
}
stage ("Terminate") {
parallel {
stage('Terminate eNB') {
steps {
echo '\u2705 \u001B[32mTerminate eNB\u001B[0m'
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password']
]) {
sh "python3 ci-scripts/main.py --mode=TerminateeNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
}
}
}
stage('Terminate SPGW') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
echo '\u2705 \u001B[32mTerminate SPGW\u001B[0m'
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
sh "python3 ci-scripts/main.py --mode=TerminateSPGW --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
}
}
}
stage('Terminate MME') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
echo '\u2705 \u001B[32mTerminate MME\u001B[0m'
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
sh "python3 ci-scripts/main.py --mode=TerminateMME --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
}
}
}
stage('Terminate HSS') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
echo '\u2705 \u001B[32mTerminate HSS\u001B[0m'
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
sh "python3 ci-scripts/main.py --mode=TerminateHSS --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
}
}
}
}
}
stage('Log Collection') {
parallel {
stage('Log Collection (eNB - Build)') {
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (eNB - Build)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectBuild --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath}"
echo '\u2705 \u001B[32mLog Transfer (eNB - Build)\u001B[0m'
sh "sshpass -p \'${eNB_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${eNB_Username}@${params.eNB_IPAddress}:${eNB_SourceCodePath}/cmake_targets/build.log.zip ./build.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("build.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "build.log.${env.BUILD_ID}.zip"
}
}
}
}
stage('Log Collection (eNB - Run)') {
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (eNB - Run)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollecteNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --BuildId=${env.BUILD_ID}"
echo '\u2705 \u001B[32mLog Transfer (eNB - Run)\u001B[0m'
sh "sshpass -p \'${eNB_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${eNB_Username}@${params.eNB_IPAddress}:${eNB_SourceCodePath}/cmake_targets/enb.log.zip ./enb.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("enb.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "enb.log.${env.BUILD_ID}.zip"
}
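// Rename the generated HTML report and fill in its template placeholders
// (job name, build ID linked to the build URL, test stage name) before archiving it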
if(fileExists("ci-scripts/test_results.html")) {
sh "mv ci-scripts/test_results.html test_results-${JOB_NAME}.html"
sh "sed -i -e 's#TEMPLATE_JOB_NAME#${JOB_NAME}#' -e 's@build #TEMPLATE_BUILD_ID@build #${BUILD_ID}@' -e 's#Build-ID: TEMPLATE_BUILD_ID#Build-ID: <a href=\"${BUILD_URL}\">${BUILD_ID}</a>#' -e 's#TEMPLATE_STAGE_NAME#${testStageName}#' test_results-${JOB_NAME}.html"
archiveArtifacts "test_results-${JOB_NAME}.html"
}
}
}
}
stage('Log Collection (SPGW)') {
// Bypass this stage if the EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (SPGW)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectSPGW --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --EPCType=${params.EPC_Type}"
echo '\u2705 \u001B[32mLog Transfer (SPGW)\u001B[0m'
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/spgw.log.zip ./spgw.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("spgw.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "spgw.log.${env.BUILD_ID}.zip"
}
}
}
}
stage('Log Collection (MME)') {
// Bypass this stage if the EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (MME)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectMME --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --EPCType=${params.EPC_Type}"
echo '\u2705 \u001B[32mLog Transfer (MME)\u001B[0m'
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/mme.log.zip ./mme.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("mme.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "mme.log.${env.BUILD_ID}.zip"
}
}
}
}
stage('Log Collection (HSS)') {
// Bypass this stage if the EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (HSS)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectHSS --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --EPCType=${params.EPC_Type}"
echo '\u2705 \u001B[32mLog Transfer (HSS)\u001B[0m'
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/hss.log.zip ./hss.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("hss.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "hss.log.${env.BUILD_ID}.zip"
}
}
}
}
stage('Log Collection (Ping)') {
// Bypass this stage if the EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (Ping)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectPing --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --EPCType=${params.EPC_Type}"
echo '\u2705 \u001B[32mLog Transfer (Ping)\u001B[0m'
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/ping.log.zip ./ping.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("ping.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "ping.log.${env.BUILD_ID}.zip"
}
}
}
}
stage('Log Collection (Iperf)') {
// Bypass this stage if the EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (Iperf)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectIperf --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --EPCType=${params.EPC_Type}"
echo '\u2705 \u001B[32mLog Transfer (Iperf)\u001B[0m'
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/iperf.log.zip ./iperf.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("iperf.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "iperf.log.${env.BUILD_ID}.zip"
}
}
}
}
}
}
}
post {
always {
script {
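// When pipelineZipsConsoleLog is set, fetch this build's console output from Jenkins,
// zip it and archive it as a build artifact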
if (params.pipelineZipsConsoleLog != null) {
if (params.pipelineZipsConsoleLog) {
echo "Archiving Jenkins console log"
sh "wget --no-check-certificate --no-proxy ${env.JENKINS_URL}/job/${env.JOB_NAME}/${env.BUILD_ID}/consoleText -O consoleText.log || true"
sh "zip -m consoleText.log.${env.BUILD_ID}.zip consoleText.log || true"
if(fileExists("consoleText.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "consoleText.log.${env.BUILD_ID}.zip"
}
}
}
}
}
}
}
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// Template Jenkins declarative pipeline script to run tests with RF hardware
// The python executor node shall be located in the same subnet as the other servers
def pythonExecutor = params.pythonExecutor
// Location of the test XML file to be run
def testXMLFile = params.pythonTestXmlFile
def mainPythonAllXmlFiles = ""
def buildStageStatus = true
// Name of the test stage
def testStageName = params.pipelineTestStageName
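// Parse the optional comma-separated LockResources parameter into the list of
// lockable-resource entries consumed by the lock() option below, e.g. (hypothetical names)
// 'bench-A,bench-B' -> [[resource: 'bench-A'], [resource: 'bench-B']]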
def lockResources = []
if (params.LockResources != null && params.LockResources.trim().length() > 0)
params.LockResources.trim().split(",").each{lockResources += [resource: it.trim()]}
// Global Parameters. Normally they should be populated when the master job
// triggers the slave job with parameters
def eNB_Repository
def eNB_Branch
def eNB_CommitID
def eNB_AllowMergeRequestProcess
def eNB_TargetBranch
// Status fed to the database
def StatusForDb = ""
pipeline {
agent {label pythonExecutor}
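// Render the colored echo output and hold the requested lockable resources for the whole run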
options {
ansiColor('xterm')
lock(extra: lockResources)
}
stages {
stage("Build Init") {
steps {
// update the build name and description
buildName "${params.eNB_MR}"
buildDescription "Branch : ${params.eNB_Branch}"
}
}
stage ("Verify Parameters") {
steps {
script {
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
def allParametersPresent = true
// It is already too late to check it here
if (params.pythonExecutor != null) {
echo "eNB CI executor node : ${pythonExecutor}"
}
// If not present, pick a default stage name
if (params.pipelineTestStageName == null) {
// picking default
testStageName = 'Template Test Stage'
}
if (params.LockResources == null) {
echo "no LockResources given"
allParametersPresent = false
}
// 1st eNB parameters
if (params.eNB_IPAddress == null) {
allParametersPresent = false
}
if (params.eNB_SourceCodePath == null) {
allParametersPresent = false
}
if (params.eNB_Credentials == null) {
allParametersPresent = false
}
// 2nd eNB parameters
if (params.eNB1_IPAddress == null) {
allParametersPresent = false
}
if (params.eNB1_SourceCodePath == null) {
allParametersPresent = false
}
if (params.eNB1_Credentials == null) {
allParametersPresent = false
}
// 3rd eNB parameters
if (params.eNB2_IPAddress == null) {
allParametersPresent = false
}
if (params.eNB2_SourceCodePath == null) {
allParametersPresent = false
}
if (params.eNB2_Credentials == null) {
allParametersPresent = false
}
if (params.UE_IPAddress == null) {
allParametersPresent = false
}
if (params.UE_SourceCodePath == null) {
allParametersPresent = false
}
if (params.UE_Credentials == null) {
allParametersPresent = false
}
// The following 4 parameters should be pushed by the master trigger;
// if not present, fall back to the job GIT variables (useful during development)
if (params.eNB_Repository == null) {
eNB_Repository = env.GIT_URL
} else {
eNB_Repository = params.eNB_Repository
}
echo "eNB_Repository : ${eNB_Repository}"
if (params.eNB_Branch == null) {
eNB_Branch = env.GIT_BRANCH
} else {
eNB_Branch = params.eNB_Branch
}
echo "eNB_Branch : ${eNB_Branch}"
if (params.eNB_CommitID == null) {
eNB_CommitID = env.GIT_COMMIT
} else {
eNB_CommitID = params.eNB_CommitID
}
echo "eNB_CommitID : ${eNB_CommitID}"
if (params.eNB_mergeRequest != null) {
eNB_AllowMergeRequestProcess = params.eNB_mergeRequest
if (eNB_AllowMergeRequestProcess) {
if (params.eNB_TargetBranch != null) {
eNB_TargetBranch = params.eNB_TargetBranch
} else {
eNB_TargetBranch = 'develop'
}
echo "eNB_TargetBranch : ${eNB_TargetBranch}"
}
}
if (params.EPC_IPAddress == null) {
allParametersPresent = false
}
if (params.EPC_Type == null) {
allParametersPresent = false
}
if (params.EPC_SourceCodePath == null) {
allParametersPresent = false
}
if (params.EPC_Credentials == null) {
allParametersPresent = false
}
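// With all parameters present, either run ci-scripts/doGitLabMerge.sh for a merge-request
// build or check out the requested commit directly; otherwise fail via ci-scripts/fail.sh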
if (allParametersPresent) {
echo "All parameters are present"
if (eNB_AllowMergeRequestProcess) {
sh "git fetch"
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${eNB_Branch} --src-commit ${eNB_CommitID} --target-branch ${eNB_TargetBranch} --target-commit latest"
} else {
sh "git fetch"
sh "git checkout -f ${eNB_CommitID}"
}
} else {
echo "Some parameters are missing"
sh "./ci-scripts/fail.sh"
}
}
}
}
stage ("Build and Test") {
steps {
script {
dir ('ci-scripts') {
echo "\u2705 \u001B[32m${testStageName}\u001B[0m"
// If not present, pick a default XML file
if (params.pythonTestXmlFile == null) {
// picking default
testXMLFile = 'xml_files/enb_usrpB210_band7_50PRB.xml'
echo "Test XML file(default): ${testXMLFile}"
mainPythonAllXmlFiles += "--XMLTestFile=" + testXMLFile + " "
} else {
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
mainPythonAllXmlFiles += "--XMLTestFile=" + xmlFile + " "
echo "Test XML file : ${xmlFile}"
} else {
echo "Test XML file ${xmlFile}: no such file"
}
}
}
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB1_Credentials}", usernameVariable: 'eNB1_Username', passwordVariable: 'eNB1_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB2_Credentials}", usernameVariable: 'eNB2_Username', passwordVariable: 'eNB2_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password'],
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.UE_Credentials}", usernameVariable: 'UE_Username', passwordVariable: 'UE_Password']
]) {
sh "python3 main.py --mode=InitiateHtml --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} ${mainPythonAllXmlFiles}"
String[] myXmlTestSuite = testXMLFile.split("\\r?\\n")
for (xmlFile in myXmlTestSuite) {
if (fileExists(xmlFile)) {
try {
timeout (time: 60, unit: 'MINUTES') {
sh "python3 main.py --mode=TesteNB --ranRepository=${eNB_Repository} --ranBranch=${eNB_Branch} --ranCommitID=${eNB_CommitID} --ranAllowMerge=${eNB_AllowMergeRequestProcess} --ranTargetBranch=${eNB_TargetBranch} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --eNB1IPAddress=${params.eNB1_IPAddress} --eNB1UserName=${eNB1_Username} --eNB1Password=${eNB1_Password} --eNB1SourceCodePath=${params.eNB1_SourceCodePath} --eNB2IPAddress=${params.eNB2_IPAddress} --eNB2UserName=${eNB2_Username} --eNB2Password=${eNB2_Password} --eNB2SourceCodePath=${params.eNB2_SourceCodePath} --UEIPAddress=${params.UE_IPAddress} --UEUserName=${UE_Username} --UEPassword=${UE_Password} --UESourceCodePath=${params.UE_SourceCodePath} --EPCIPAddress=${params.EPC_IPAddress} --EPCType=${params.EPC_Type} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --XMLTestFile=${xmlFile}"
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
buildStageStatus = false
}
}
}
sh "python3 main.py --mode=FinalizeHtml --finalStatus=${buildStageStatus} --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --UEIPAddress=${params.UE_IPAddress} --UEUserName=${UE_Username} --UEPassword=${UE_Password}"
}
}
}
}
}
stage('Log Collection') {
parallel {
stage('Log Collection (eNB - Build)') {
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (eNB - Build)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectBuild --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath}"
echo '\u2705 \u001B[32mLog Transfer (eNB - Build)\u001B[0m'
sh "sshpass -p \'${eNB_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${eNB_Username}@${params.eNB_IPAddress}:${eNB_SourceCodePath}/cmake_targets/build.log.zip ./build.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("build.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "build.log.${env.BUILD_ID}.zip"
}
}
}
}
stage('Log Collection (OAI UE - Build)') {
steps {
echo '\u2705 \u001B[32mLog Collection (OAI UE - Build)\u001B[0m'
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.UE_Credentials}", usernameVariable: 'UE_Username', passwordVariable: 'UE_Password']
]) {
sh "python3 ci-scripts/main.py --mode=LogCollectBuild --UEIPAddress=${params.UE_IPAddress} --UEUserName=${UE_Username} --UEPassword=${UE_Password} --UESourceCodePath=${params.UE_SourceCodePath}"
echo '\u2705 \u001B[32mLog Transfer (UE - Build)\u001B[0m'
sh "sshpass -p \'${UE_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${UE_Username}@${params.UE_IPAddress}:${UE_SourceCodePath}/cmake_targets/build.log.zip ./build.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("build.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "build.log.${env.BUILD_ID}.zip"
}
}
}
}
stage('Log Collection (eNB - Run)') {
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.eNB_Credentials}", usernameVariable: 'eNB_Username', passwordVariable: 'eNB_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (eNB - Run)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollecteNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password} --eNBSourceCodePath=${params.eNB_SourceCodePath} --BuildId=${env.BUILD_ID}"
echo '\u2705 \u001B[32mLog Transfer (eNB - Run)\u001B[0m'
sh "sshpass -p \'${eNB_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${eNB_Username}@${params.eNB_IPAddress}:${eNB_SourceCodePath}/cmake_targets/enb.log.zip ./enb.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("enb.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "enb.log.${env.BUILD_ID}.zip"
}
}
}
}
stage('Log Collection (OAI UE - Run)') {
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.UE_Credentials}", usernameVariable: 'UE_Username', passwordVariable: 'UE_Password']
]) {
echo '\u2705 \u001B[32mLog Collection (OAI UE - Run)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectOAIUE --UEIPAddress=${params.UE_IPAddress} --UEUserName=${UE_Username} --UEPassword=${UE_Password} --UESourceCodePath=${params.UE_SourceCodePath}"
echo '\u2705 \u001B[32mLog Transfer (OAI UE - Run)\u001B[0m'
sh "sshpass -p \'${UE_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${UE_Username}@${params.UE_IPAddress}:${UE_SourceCodePath}/cmake_targets/ue.log.zip ./ue.log.${env.BUILD_ID}.zip || true"
}
script {
if(fileExists("ue.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "ue.log.${env.BUILD_ID}.zip"
}
}
}
}
stage('Log Collection (CN)') {
// Bypass this stage if the EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
script {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
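// Legacy EPC archives map to hss/mme/spgw logs; for OAICN5G no HSS archive is transferred
// and the mme/spgw archives are stored locally as AMF and SMF/UPF logs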
echo '\u2705 \u001B[32mLog Collection (HSS)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectHSS --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --EPCType=${params.EPC_Type}"
if (params.EPC_Type != 'OAICN5G') {
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/hss.log.zip ./hss.log.${env.BUILD_ID}.zip || true"
}
echo '\u2705 \u001B[32mLog Collection (MME or AMF)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectMME --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --EPCType=${params.EPC_Type}"
if (params.EPC_Type == 'OAICN5G') {
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/mme.log.zip ./amf.log.${env.BUILD_ID}.zip || true"
} else {
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/mme.log.zip ./mme.log.${env.BUILD_ID}.zip || true"
}
echo '\u2705 \u001B[32mLog Collection (SPGW or SMF/UPF)\u001B[0m'
sh "python3 ci-scripts/main.py --mode=LogCollectSPGW --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCSourceCodePath=${params.EPC_SourceCodePath} --EPCType=${params.EPC_Type}"
if (params.EPC_Type == 'OAICN5G') {
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/spgw.log.zip ./smf-upf.log.${env.BUILD_ID}.zip || true"
} else {
sh "sshpass -p \'${EPC_Password}\' scp -o 'StrictHostKeyChecking no' -o 'ConnectTimeout 10' ${EPC_Username}@${params.EPC_IPAddress}:${EPC_SourceCodePath}/scripts/spgw.log.zip ./spgw.log.${env.BUILD_ID}.zip || true"
}
}
if(fileExists("hss.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "hss.log.${env.BUILD_ID}.zip"
}
if(fileExists("mme.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "mme.log.${env.BUILD_ID}.zip"
}
if(fileExists("spgw.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "spgw.log.${env.BUILD_ID}.zip"
}
if(fileExists("amf.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "amf.log.${env.BUILD_ID}.zip"
}
if(fileExists("smf-upf.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "smf-upf.log.${env.BUILD_ID}.zip"
}
echo '\u2705 \u001B[32mLog Collection for CoreNetwork Done!\u001B[0m'
}
}
}
}
}
}
post {
always {
script {
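// Archive the HTML test report (with its template placeholders filled in) and, when
// requested, the zipped Jenkins console log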
if(fileExists("ci-scripts/test_results.html")) {
sh "mv ci-scripts/test_results.html test_results-${JOB_NAME}.html"
sh "sed -i -e 's#TEMPLATE_JOB_NAME#${JOB_NAME}#' -e 's@build #TEMPLATE_BUILD_ID@build #${BUILD_ID}@' -e 's#Build-ID: TEMPLATE_BUILD_ID#Build-ID: <a href=\"${BUILD_URL}\">${BUILD_ID}</a>#' -e 's#TEMPLATE_STAGE_NAME#${testStageName}#' test_results-${JOB_NAME}.html"
archiveArtifacts "test_results-${JOB_NAME}.html"
}
if (params.pipelineZipsConsoleLog != null) {
if (params.pipelineZipsConsoleLog) {
echo "Archiving Jenkins console log"
sh "wget --no-check-certificate --no-proxy ${env.JENKINS_URL}/job/${env.JOB_NAME}/${env.BUILD_ID}/consoleText -O consoleText.log || true"
sh "zip -m consoleText.log.${env.BUILD_ID}.zip consoleText.log || true"
if(fileExists("consoleText.log.${env.BUILD_ID}.zip")) {
archiveArtifacts "consoleText.log.${env.BUILD_ID}.zip"
}
}
}
}
}
}
}