Compare revisions: showing 3288 additions and 1838 deletions.
# This is a configuration file
# used to build real-time processing statistics
# for the 5G NR PHY test (gNB terminated)
Title : Processing Time (us) from datalog_rt_stats.2x2.yaml
ColNames :
- Metric
- Average; Max; Count
- Average vs Reference Deviation (Reference Value; Acceptability Deviation Threshold)
Ref :
feprx : 120.0
feptx_prec : 30.0
feptx_ofdm : 50.0
feptx_total : 120.0
L1 Tx processing : 300.0
DLSCH encoding : 230.0
L1 Rx processing : 175.0
PUSCH inner-receiver : 100.0
Schedule Response : 3.0
DL & UL scheduling timing : 11.0
UL Indication : 3.0
Slot Indication : 14.0
DeviationThreshold :
feprx : 0.25
feptx_prec : 0.25
feptx_ofdm : 0.25
feptx_total : 0.25
L1 Tx processing : 0.25
DLSCH encoding : 0.25
L1 Rx processing : 0.25
PUSCH inner-receiver : 0.25
Schedule Response : 1.00
DL & UL scheduling timing : 0.50
UL Indication : 1.00
Slot Indication : 0.25
# This is a configuration file
# used to build real-time processing statistics
# for the 5G NR PHY test (gNB terminated)
Title : Processing Time (us) from datalog_rt_stats.60.2x2.yaml
ColNames :
- Metric
- Average; Max; Count
- Average vs Reference Deviation (Reference Value; Acceptability Deviation Threshold)
Ref :
feprx : 75.0
feptx_prec : 14.0
feptx_ofdm : 30.0
feptx_total : 80.0
L1 Tx processing : 315.0
DLSCH encoding : 155.0
L1 Rx processing : 345.0
PUSCH inner-receiver : 155.0
Schedule Response : 3.0
DL & UL scheduling timing : 13.0
UL Indication : 3.0
Slot Indication : 12.0
DeviationThreshold :
feprx : 0.25
feptx_prec : 0.25
feptx_ofdm : 0.25
feptx_total : 0.25
L1 Tx processing : 0.25
DLSCH encoding : 0.25
L1 Rx processing : 0.25
PUSCH inner-receiver : 0.25
Schedule Response : 1.00
DL & UL scheduling timing : 0.35
UL Indication : 1.00
Slot Indication : 0.35
# This is a configuration file
# used to build real-time processing statistics
# for the 5G NR PHY test (gNB terminated)
Title : Processing Time (us) from datalog_rt_stats.default.yaml
ColNames :
- Metric
- Average; Max; Count
- Average vs Reference Deviation (Reference Value; Acceptability Deviation Threshold)
Ref :
feprx : 40.0
feptx_prec : 13.0
feptx_ofdm : 30.0
feptx_total : 45.0
L1 Tx processing : 160.0
DLSCH encoding : 100.0
L1 Rx processing : 290.0
PUSCH inner-receiver : 115.0
Schedule Response : 3.0
DL & UL scheduling timing : 6.0
UL Indication : 2.0
Slot Indication : 7.0
DeviationThreshold :
feprx : 0.25
feptx_prec : 0.25
feptx_ofdm : 0.25
feptx_total : 0.25
L1 Tx processing : 0.25
DLSCH encoding : 0.25
L1 Rx processing : 0.25
PUSCH inner-receiver : 0.25
Schedule Response : 1.00
DL & UL scheduling timing : 0.50
UL Indication : 1.00
Slot Indication : 0.50
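For orientation, here is a minimal sketch of how the Ref and DeviationThreshold tables above could be consumed. It is not the CI framework's own checker: the helper name, the pass rule (relative deviation of the measured average against the reference), and the measured values are illustrative assumptions, and it presumes the YAML files keep Ref and DeviationThreshold as nested maps and that PyYAML is available.

# Sketch only -- not the repository's checker.
import yaml

def check_rt_stats(cfg_path, measured_avgs):
    """Return {metric: (relative_deviation, within_threshold)} for every
    metric that has both a reference value and a measured average."""
    with open(cfg_path) as f:
        cfg = yaml.safe_load(f)
    results = {}
    for metric, ref in cfg['Ref'].items():
        if metric not in measured_avgs:
            continue
        deviation = (measured_avgs[metric] - ref) / ref   # deviation relative to the reference
        results[metric] = (deviation, deviation <= cfg['DeviationThreshold'][metric])
    return results

# Illustrative values: 'feprx' measured at 130 us against the 120 us reference and
# 0.25 threshold from datalog_rt_stats.2x2.yaml -> deviation ~0.083 -> within tolerance.
print(check_rt_stats('datalog_rt_stats.2x2.yaml', {'feprx': 130.0}))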
#!/bin/bash
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
function usage {
echo "OAI VM Destroy script"
echo " Original Author: Raphael Defosseux"
echo ""
echo "Usage:"
echo "------"
echo " destroyAllRunningVM.sh [OPTIONS]"
echo ""
echo "Options:"
echo "--------"
echo " --job-name #### OR -jn ####"
echo " Specify the name of the Jenkins job."
echo ""
echo " --build-id #### OR -id ####"
echo " Specify the build ID of the Jenkins job."
echo ""
echo " --help OR -h"
echo " Print this help message."
echo ""
}
if [ $# -gt 4 ]
then
echo "Syntax Error: not the correct number of arguments"
echo ""
usage
exit 1
fi
VM_TEMPLATE=ci-
JOB_NAME=XX
BUILD_ID=XX
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-h|--help)
shift
usage
exit 0
;;
-jn|--job-name)
JOB_NAME="$2"
shift
shift
;;
-id|--build-id)
BUILD_ID="$2"
shift
shift
;;
*)
echo "Syntax Error: unknown option: $key"
echo ""
usage
exit 1
esac
done
if [ "$JOB_NAME" == "XX" ] || [ "$BUILD_ID" == "XX" ]
then
VM_TEMPLATE=ci-
else
VM_TEMPLATE=${JOB_NAME}-b${BUILD_ID}-
fi
LIST_CI_VM=`uvt-kvm list | grep $VM_TEMPLATE`
for CI_VM in $LIST_CI_VM
do
VM_IP_ADDR=`uvt-kvm ip $CI_VM`
echo "VM to destroy: $CI_VM -- IP $VM_IP_ADDR"
uvt-kvm destroy $CI_VM
ssh-keygen -R $VM_IP_ADDR
done
exit 0
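A typical invocation, for reference; the job name and build ID below are placeholders, not values from any real pipeline:

# Destroy only the VMs created by one Jenkins job/build (placeholder values):
./destroyAllRunningVM.sh --job-name RAN-CI-example --build-id 42
# With no options, every VM whose name matches the generic "ci-" pattern is destroyed.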
#!/bin/bash
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
function usage {
echo "OAI GitLab merge request applying script"
...
@@ -83,6 +103,11 @@ esac
done
if [[ $TARGET_COMMIT_ID == "latest" ]]
then
TARGET_COMMIT_ID=`git log -n1 --pretty=format:%H origin/$TARGET_BRANCH`
fi
echo "Source Branch is : $SOURCE_BRANCH"
echo "Source Commit ID is : $SOURCE_COMMIT_ID"
echo "Target Branch is : $TARGET_BRANCH"
...
@@ -100,11 +125,19 @@ fi
git config user.email "jenkins@openairinterface.org"
git config user.name "OAI Jenkins"
git checkout -f $SOURCE_COMMIT_ID
git checkout -f $SOURCE_COMMIT_ID > checkout.txt 2>&1
STATUS=`grep -E -c "fatal: reference is not a tree" checkout.txt`
rm -f checkout.txt
if [ $STATUS -ne 0 ]
then
echo "fatal: reference is not a tree --> $SOURCE_COMMIT_ID"
STATUS=-1
exit $STATUS
fi
git merge --ff $TARGET_COMMIT_ID -m "Temporary merge for CI"
STATUS=`git status | egrep -c "You have unmerged paths.|fix conflicts"`
STATUS=`git status | grep -E -c "You have unmerged paths.|fix conflicts"`
if [ $STATUS -ne 0 ]
then
echo "There are merge conflicts.. Cannot perform further build tasks"
......
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
#
# Dockerfile for the Open-Air-Interface cppcheck static-analysis stage
# Valid for Ubuntu 16.04 (xenial)
#
#---------------------------------------------------------------------
FROM ubuntu:xenial AS oai-cppcheck
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get upgrade --yes && \
DEBIAN_FRONTEND=noninteractive apt-get install --yes \
build-essential \
vim \
cppcheck
WORKDIR /oai-ran
COPY . .
WORKDIR /oai-ran/common/utils/T
RUN make
WORKDIR /oai-ran
RUN mkdir -p cmake_targets/log && \
cppcheck --enable=warning --force --xml --xml-version=2 \
--inline-suppr \
-i openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_decoder.c \
--suppressions-list=ci-scripts/cppcheck_suppressions.list \
-I common/utils \
-I openair3/NAS/COMMON/UTIL \
-j`nproc` . 2> cmake_targets/log/cppcheck.xml 1> cmake_targets/log/cppcheck_build.txt
RUN grep -E -c 'severity="error' cmake_targets/log/cppcheck.xml
RUN grep -E -c 'severity="warning' cmake_targets/log/cppcheck.xml
RUN cat cmake_targets/log/cppcheck.xml
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
#
# Dockerfile for the Open-Air-Interface coding-formatting check stage
# Valid for Ubuntu 18.04 (bionic)
#
#---------------------------------------------------------------------
FROM ubuntu:bionic AS oai-formatting-check
ARG MERGE_REQUEST
ARG SRC_BRANCH
ARG TARGET_BRANCH
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get upgrade --yes && \
DEBIAN_FRONTEND=noninteractive apt-get install --yes \
gawk \
git
WORKDIR /oai-ran
COPY . .
RUN /bin/bash -c "if [[ -v MERGE_REQUEST ]]; then echo 'Source Branch = $SRC_BRANCH'; echo 'Target Branch = $TARGET_BRANCH'; else echo 'Push to develop'; fi"
RUN /bin/bash -c "if [[ -v MERGE_REQUEST ]]; then ./ci-scripts/checkCodingFormattingRules.sh --src-branch $SRC_BRANCH --target-branch $TARGET_BRANCH; else ./ci-scripts/checkCodingFormattingRules.sh; fi"
RUN echo "=== Files with incorrect define protection ===" && \
/bin/bash -c "if [[ -f header-files-w-incorrect-define.txt ]]; then cat header-files-w-incorrect-define.txt; fi"
RUN echo "=== Files with a GNU GPL licence Banner ===" && \
/bin/bash -c "if [[ -f files-w-gnu-gpl-license-banner.txt ]]; then cat files-w-gnu-gpl-license-banner.txt; fi"
RUN echo "=== Files with a suspect Banner ===" && \
/bin/bash -c "if [[ -f files-w-suspect-banner.txt ]]; then cat files-w-suspect-banner.txt; fi"
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
#
# Dockerfile for the Open-Air-Interface unit-test (ran-tests) stage
# Valid for Ubuntu 22.04
#
#---------------------------------------------------------------------
FROM ran-base:develop AS ran-tests
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get upgrade --yes && \
DEBIAN_FRONTEND=noninteractive apt-get install --yes \
libgtest-dev \
libyaml-cpp-dev
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
WORKDIR /oai-ran/build
RUN cmake -GNinja -DENABLE_TESTS=ON -DCMAKE_BUILD_TYPE=Debug -DSANITIZE_ADDRESS=True .. && ninja tests
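A hedged example of building this stage and running the compiled tests afterwards; it assumes a local ran-base:develop image exists, the image tag and Dockerfile path are placeholders, and invoking ctest inside the container is an assumption rather than a documented workflow:

# Illustrative only: build the test stage and run the unit tests inside it.
docker build --target ran-tests --tag ran-tests:local --file <path-to-this-Dockerfile> .
docker run --rm ran-tests:local bash -c "cd /oai-ran/build && ctest --output-on-failure"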
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
# Python for CI of OAI-eNB + COTS-UE
#
# Required Python Version
# Python 3.x
#
# Required Python Package
# pexpect
#---------------------------------------------------------------------
#-----------------------------------------------------------
# Import
#-----------------------------------------------------------
import sys # arg
import re # reg
import logging
import os
import time
import signal
#-----------------------------------------------------------
# OAI Testing modules
#-----------------------------------------------------------
import sshconnection as SSH
import helpreadme as HELP
import constants as CONST
import cls_cluster as OC
import cls_cmd
import cls_module
#-----------------------------------------------------------
# Class Declaration
#-----------------------------------------------------------
class EPCManagement():
def __init__(self):
self.IPAddress = ''
self.UserName = ''
self.Password = ''
self.SourceCodePath = ''
self.Type = ''
self.PcapFileName = ''
self.testCase_id = ''
self.MmeIPAddress = ''
self.containerPrefix = 'prod'
self.mmeConfFile = 'mme.conf'
self.yamlPath = ''
self.isMagmaUsed = False
self.cfgDeploy = '--type start-mini --scenario 1 --capture /tmp/oai-cn5g-v1.5.pcap' # overridden from the XML test file; 'mini' is the default scenario for core-network.py
self.cfgUnDeploy = '--type stop-mini --scenario 1' # overridden from the XML test file; 'mini' is the default scenario for core-network.py
self.OCUrl = "https://api.oai.cs.eurecom.fr:6443"
self.OCRegistry = "default-route-openshift-image-registry.apps.oai.cs.eurecom.fr"
self.OCUserName = ''
self.OCPassword = ''
self.cnID = ''
self.imageToPull = ''
self.eNBSourceCodePath = ''
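# -----------------------------------------------------------------
# Illustrative usage (not part of this module): the CI framework normally
# fills these attributes from the XML test description, then calls the
# Initialize*/Terminate* methods with an HTML reporter and the Check*
# methods with a status queue. All values below are placeholders.
#
#   EPC = EPCManagement()
#   EPC.IPAddress      = '192.168.18.210'
#   EPC.UserName       = 'oaici'
#   EPC.Password       = 'example-password'
#   EPC.SourceCodePath = '/opt/oai-epc'
#   EPC.Type           = 'OAI-Rel14-Docker'
#   EPC.InitializeHSS(HTML)
#   EPC.InitializeMME(HTML)
#   EPC.InitializeSPGW(HTML)
# -----------------------------------------------------------------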
#-----------------------------------------------------------
# EPC management functions
#-----------------------------------------------------------
def InitializeHSS(self, HTML):
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.SourceCodePath, self.Type)
sys.exit('Insufficient EPC Parameters')
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 Cassandra-based HSS in Docker')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-hss /bin/bash -c "nohup tshark -i eth0 -i eth1 -w /tmp/hss_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
time.sleep(5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-hss /bin/bash -c "nohup ./bin/oai_hss -j ./etc/hss_rel14.json --reloadkey true > hss_check_run.log 2>&1"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 Cassandra-based HSS')
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
logging.debug('\u001B[1m Launching tshark on all interfaces \u001B[0m')
self.PcapFileName = 'epc_' + self.testCase_id + '.pcap'
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f ' + self.PcapFileName, '\$', 5)
mySSH.command('echo $USER; nohup sudo tshark -f "tcp port not 22 and port not 53" -i any -w ' + self.SourceCodePath + '/scripts/' + self.PcapFileName + ' > /tmp/tshark.log 2>&1 &', self.UserName, 5)
mySSH.command('echo ' + self.Password + ' | sudo -S mkdir -p logs', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f hss_' + self.testCase_id + '.log logs/hss*.*', '\$', 5)
mySSH.command('echo "oai_hss -j /usr/local/etc/oai/hss_rel14.json" > ./my-hss.sh', '\$', 5)
mySSH.command('chmod 755 ./my-hss.sh', '\$', 5)
mySSH.command('sudo daemon --unsafe --name=hss_daemon --chdir=' + self.SourceCodePath + '/scripts -o ' + self.SourceCodePath + '/scripts/hss_' + self.testCase_id + '.log ./my-hss.sh', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC HSS')
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('source oaienv', '\$', 5)
mySSH.command('cd scripts', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./run_hss 2>&1 | stdbuf -o0 awk \'{ print strftime("[%Y/%m/%d %H:%M:%S] ",systime()) $0 }\' | stdbuf -o0 tee -a hss_' + self.testCase_id + '.log &', 'Core state: 2 -> 3', 35)
elif re.match('ltebox', self.Type, re.IGNORECASE):
logging.debug('Using the ltebox simulated HSS')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts', '\$', 5)
result = re.search('hss_sim s6as diam_hss', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall hss_sim', '\$', 5)
mySSH.command('ps aux | grep --colour=never xGw | grep -v grep', '\$', 5, silent=True)
result = re.search('root.*xGw', mySSH.getBefore())
if result is not None:
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./stop_ltebox', '\$', 5)
mySSH.command('cd /opt/hss_sim0609', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f hss.log', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S echo "Starting sudo session" && sudo su -c "screen -dm -S simulated_hss ./starthss"', '\$', 5)
else:
logging.error('This option should not occur!')
mySSH.close()
HTML.CreateHtmlTestRow(self.Type, 'OK', CONST.ALL_PROCESSES_OK)
return True
def InitializeMME(self, HTML):
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.SourceCodePath, self.Type)
sys.exit('Insufficient EPC Parameters')
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 MME in Docker')
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-mme /bin/bash -c "nohup tshark -i eth0 -i lo:s10 -f "not port 2152" -w /tmp/mme_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
time.sleep(5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-mme /bin/bash -c "nohup ./bin/oai_mme -c ./etc/' + self.mmeConfFile + ' > mme_check_run.log 2>&1"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 MME')
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f mme_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('echo "./run_mme --config-file /usr/local/etc/oai/mme.conf --set-virt-if" > ./my-mme.sh', '\$', 5)
mySSH.command('chmod 755 ./my-mme.sh', '\$', 5)
mySSH.command('sudo daemon --unsafe --name=mme_daemon --chdir=' + self.SourceCodePath + '/scripts -o ' + self.SourceCodePath + '/scripts/mme_' + self.testCase_id + '.log ./my-mme.sh', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('source oaienv', '\$', 5)
mySSH.command('cd scripts', '\$', 5)
mySSH.command('stdbuf -o0 hostname', '\$', 5)
result = re.search('hostname\\\\r\\\\n(?P<host_name>[a-zA-Z0-9\-\_]+)\\\\r\\\\n', mySSH.getBefore())
if result is None:
logging.debug('\u001B[1;37;41m Hostname Not Found! \u001B[0m')
sys.exit(1)
host_name = result.group('host_name')
mySSH.command('echo ' + self.Password + ' | sudo -S ./run_mme 2>&1 | stdbuf -o0 tee -a mme_' + self.testCase_id + '.log &', 'MME app initialization complete', 100)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
# Clean-up the logs from previous runs
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f ../var/log/*.0', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./start_mme', '\$', 5)
else:
logging.error('This option should not occur!')
mySSH.close()
HTML.CreateHtmlTestRow(self.Type, 'OK', CONST.ALL_PROCESSES_OK)
return True
def SetMmeIPAddress(self):
# Not an error if we don't need an EPC
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
return
if self.IPAddress == 'none':
return
# Only for Docker-based deployments is the MME IP address different from the EPC host IP address
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
self.isMagmaUsed = False
mySSH.command('docker ps -a', '\$', 5)
result = re.search('magma', mySSH.getBefore())
if result is not None:
self.isMagmaUsed = True
if self.isMagmaUsed:
mySSH.command('docker inspect --format="MME_IP_ADDR = {{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}" ' + self.containerPrefix + '-magma-mme', '\$', 5)
else:
mySSH.command('docker inspect --format="MME_IP_ADDR = {{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}" ' + self.containerPrefix + '-oai-mme', '\$', 5)
result = re.search('MME_IP_ADDR = (?P<mme_ip_addr>[0-9\.]+)', mySSH.getBefore())
if result is not None:
self.MmeIPAddress = result.group('mme_ip_addr')
logging.debug('MME IP Address is ' + self.MmeIPAddress)
mySSH.close()
else:
self.MmeIPAddress = self.IPAddress
def InitializeSPGW(self, HTML):
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.SourceCodePath, self.Type)
sys.exit('Insufficient EPC Parameters')
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 SPGW-CUPS in Docker')
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "nohup tshark -i eth0 -i lo:p5c -i lo:s5c -f "not port 2152" -w /tmp/spgwc_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "nohup tshark -i eth0 -f "not port 2152" -w /tmp/spgwu_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
time.sleep(5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "nohup ./bin/oai_spgwc -o -c ./etc/spgw_c.conf > spgwc_check_run.log 2>&1"', '\$', 5)
time.sleep(5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "nohup ./bin/oai_spgwu -o -c ./etc/spgw_u.conf > spgwu_check_run.log 2>&1"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 SPGW-CUPS')
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f spgwc_' + self.testCase_id + '.log spgwu_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('echo "spgwc -c /usr/local/etc/oai/spgw_c.conf" > ./my-spgwc.sh', '\$', 5)
mySSH.command('chmod 755 ./my-spgwc.sh', '\$', 5)
mySSH.command('sudo daemon --unsafe --name=spgwc_daemon --chdir=' + self.SourceCodePath + '/scripts -o ' + self.SourceCodePath + '/scripts/spgwc_' + self.testCase_id + '.log ./my-spgwc.sh', '\$', 5)
time.sleep(5)
mySSH.command('echo "spgwu -c /usr/local/etc/oai/spgw_u.conf" > ./my-spgwu.sh', '\$', 5)
mySSH.command('chmod 755 ./my-spgwu.sh', '\$', 5)
mySSH.command('sudo daemon --unsafe --name=spgwu_daemon --chdir=' + self.SourceCodePath + '/scripts -o ' + self.SourceCodePath + '/scripts/spgwu_' + self.testCase_id + '.log ./my-spgwu.sh', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('source oaienv', '\$', 5)
mySSH.command('cd scripts', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./run_spgw 2>&1 | stdbuf -o0 tee -a spgw_' + self.testCase_id + '.log &', 'Initializing SPGW-APP task interface: DONE', 30)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./start_xGw', '\$', 5)
else:
logging.error('This option should not occur!')
mySSH.close()
HTML.CreateHtmlTestRow(self.Type, 'OK', CONST.ALL_PROCESSES_OK)
return True
def Initialize5GCN(self, HTML):
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.Type)
logging.error('Insufficient EPC Parameters')
return False
mySSH = cls_cmd.getConnection(self.IPAddress)
html_cell = ''
if re.match('ltebox', self.Type, re.IGNORECASE):
logging.debug('Using the SABOX simulated HSS')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('cd /opt/hss_sim0609', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f hss.log', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S echo "Starting sudo session" && sudo su -c "screen -dm -S simulated_5g_hss ./start_5g_hss"', '\$', 5)
logging.debug('Using the sabox')
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./start_sabox', '\$', 5)
html_cell += 'N/A\n'
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
logging.debug('Starting OAI CN5G')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command(f'cd {self.SourceCodePath}/docker-compose', '\$', 5)
mySSH.command('python3 ./core-network.py '+self.cfgDeploy, '\$', 60)
if re.search('start-mini-as-ue', self.cfgDeploy):
dFile = 'docker-compose-mini-nrf-asue.yaml'
elif re.search('basic', self.cfgDeploy):
dFile = 'docker-compose-basic-nrf.yaml'
else:
dFile = 'docker-compose-mini-nrf.yaml'
mySSH.command('docker-compose -f ' + dFile + ' ps -a', '\$', 60)
if mySSH.getBefore().count('Up (healthy)') != 6:
logging.error('Not all containers are healthy')
else:
logging.debug('OK --> all containers are healthy')
mySSH.command('docker-compose -f ' + dFile + ' config | grep --colour=never image', '\$', 10)
listOfImages = mySSH.getBefore()
for imageLine in listOfImages.split('\\r\\n'):
res1 = re.search('image: (?P<name>[a-zA-Z0-9\-/]+):(?P<tag>[a-zA-Z0-9\-]+)', str(imageLine))
res2 = re.search('mysql', str(imageLine))
if res1 is not None and res2 is None:
html_cell += res1.group('name') + ':' + res1.group('tag') + ' '
nbChars = len(res1.group('name')) + len(res1.group('tag')) + 2
while (nbChars < 32):
html_cell += ' '
nbChars += 1
mySSH.command('docker image inspect --format="Size = {{.Size}} bytes" ' + res1.group('name') + ':' + res1.group('tag'), '\$', 10)
res3 = re.search('Size *= *(?P<size>[0-9\-]*) *bytes', mySSH.getBefore())
if res3 is not None:
imageSize = int(res3.group('size'))
imageSize = int(imageSize/(1024*1024))
html_cell += str(imageSize) + ' MBytes '
mySSH.command('docker image inspect --format="Date = {{.Created}}" ' + res1.group('name') + ':' + res1.group('tag'), '\$', 10)
res4 = re.search('Date *= *(?P<date>[0-9\-]*)T', mySSH.getBefore())
if res4 is not None:
html_cell += '(' + res4.group('date') + ')'
html_cell += '\n'
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
cn = cls_module.Module_UE(self.cnID)
succeeded, report = OC.OC_deploy_CN(mySSH, self.OCUserName, self.OCPassword, cn.getNamespace(), cn.getCNPath())
if not succeeded:
HTML.CreateHtmlTestRow('N/A', 'KO', report)
HTML.CreateHtmlTabFooter(False)
mySSH.close()
logging.error("OC OAI CN5G: CN deployment failed!")
return False
for line in report.stdout.split('\n')[1:]:
columns = line.strip().split()
name = columns[0]
status = columns[2]
html_cell += status + ' ' + name
html_cell += '\n'
else:
logging.error('This option should not occur!')
mySSH.close()
HTML.CreateHtmlTestRowQueue(self.Type, 'OK', [html_cell])
return True
def SetAmfIPAddress(self):
# Not an error if we don't need a 5GCN
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
return
if self.IPAddress == 'none':
return
if re.match('ltebox', self.Type, re.IGNORECASE):
self.MmeIPAddress = self.IPAddress
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
response=mySSH.command3('docker container ls -f name=oai-amf', 10)
if len(response)>1:
response=mySSH.command3('docker inspect --format=\'{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}\' oai-amf', 10)
tmp = str(response[0],'utf-8')
self.MmeIPAddress = tmp.rstrip()
logging.debug('AMF IP Address ' + self.MmeIPAddress)
else:
logging.error('no container with name oai-amf found, could not retrieve AMF IP address')
mySSH.close()
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
response=mySSH.command3('oc pods ls -f name=oai-amf', 10)
def CheckHSSProcess(self, status_queue):
try:
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker top ' + self.containerPrefix + '-oai-hss', '\$', 5)
else:
mySSH.command('stdbuf -o0 ps -aux | grep --color=never hss | grep -v grep', '\$', 5)
if re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
result = re.search('oai_hss -j', mySSH.getBefore())
elif re.match('OAI', self.Type, re.IGNORECASE):
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
elif re.match('ltebox', self.Type, re.IGNORECASE):
result = re.search('hss_sim s6as diam_hss', mySSH.getBefore())
else:
logging.error('This should not happen!')
if result is None:
logging.debug('\u001B[1;37;41m HSS Process Not Found! \u001B[0m')
status_queue.put(CONST.HSS_PROCESS_FAILED)
else:
status_queue.put(CONST.HSS_PROCESS_OK)
mySSH.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def CheckMMEProcess(self, status_queue):
try:
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
self.isMagmaUsed = False
mySSH.command('docker ps -a', '\$', 5)
result = re.search('magma', mySSH.getBefore())
if result is not None:
self.isMagmaUsed = True
if self.isMagmaUsed:
mySSH.command('docker top ' + self.containerPrefix + '-magma-mme', '\$', 5)
else:
mySSH.command('docker top ' + self.containerPrefix + '-oai-mme', '\$', 5)
else:
mySSH.command('stdbuf -o0 ps -aux | grep --color=never mme | grep -v grep', '\$', 5)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
result = re.search('oai_mme -c ', mySSH.getBefore())
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
result = re.search('mme -c', mySSH.getBefore())
elif re.match('OAI', self.Type, re.IGNORECASE):
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
elif re.match('ltebox', self.Type, re.IGNORECASE):
result = re.search('mme|amf', mySSH.getBefore())
else:
logging.error('This should not happen!')
if result is None:
logging.debug('\u001B[1;37;41m MME|AMF Process Not Found! \u001B[0m')
status_queue.put(CONST.MME_PROCESS_FAILED)
else:
status_queue.put(CONST.MME_PROCESS_OK)
mySSH.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def CheckSPGWProcess(self, status_queue):
try:
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker top ' + self.containerPrefix + '-oai-spgwc', '\$', 5)
result = re.search('oai_spgwc -', mySSH.getBefore())
if result is not None:
mySSH.command('docker top ' + self.containerPrefix + '-oai-spgwu-tiny', '\$', 5)
result = re.search('oai_spgwu -', mySSH.getBefore())
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('stdbuf -o0 ps -aux | grep --color=never spgw | grep -v grep', '\$', 5)
result = re.search('spgwu -c ', mySSH.getBefore())
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('stdbuf -o0 ps -aux | grep --color=never spgw | grep -v grep', '\$', 5)
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('stdbuf -o0 ps -aux | grep --color=never xGw | grep -v grep', '\$', 5)
result = re.search('xGw|upf', mySSH.getBefore())
else:
logging.error('This should not happen!')
if result is None:
logging.debug('\u001B[1;37;41m SPGW|UPF Process Not Found! \u001B[0m')
status_queue.put(CONST.SPGW_PROCESS_FAILED)
else:
status_queue.put(CONST.SPGW_PROCESS_OK)
mySSH.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def TerminateHSS(self, HTML):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-hss /bin/bash -c "killall --signal SIGINT oai_hss tshark"', '\$', 5)
time.sleep(2)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-hss /bin/bash -c "ps aux | grep oai_hss"', '\$', 5)
result = re.search('oai_hss -j ', mySSH.getBefore())
if result is not None:
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-hss /bin/bash -c "killall --signal SIGKILL oai_hss"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT oai_hss || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep --colour=never hss | grep -v grep', '\$', 5)
result = re.search('oai_hss -j', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL oai_hss || true', '\$', 5)
mySSH.command('rm -f ' + self.SourceCodePath + '/scripts/my-hss.sh', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT run_hss oai_hss || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep --colour=never hss | grep -v grep', '\$', 5)
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL run_hss oai_hss || true', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('cd scripts', '\$', 5)
time.sleep(1)
mySSH.command('echo ' + self.Password + ' | sudo -S screen -S simulated_hss -X quit', '\$', 5)
time.sleep(5)
mySSH.command('ps aux | grep --colour=never hss_sim | grep -v grep', '\$', 5, silent=True)
result = re.search('hss_sim s6as diam_hss', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall hss_sim', '\$', 5)
else:
logging.error('This should not happen!')
mySSH.close()
HTML.CreateHtmlTestRow('N/A', 'OK', CONST.ALL_PROCESSES_OK)
return True
def TerminateMME(self, HTML):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-mme /bin/bash -c "killall --signal SIGINT oai_mme tshark"', '\$', 5)
time.sleep(2)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-mme /bin/bash -c "ps aux | grep oai_mme"', '\$', 5)
result = re.search('oai_mme -c ', mySSH.getBefore())
if result is not None:
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-mme /bin/bash -c "killall --signal SIGKILL oai_mme"', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT run_mme mme || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep mme | grep -v grep', '\$', 5)
result = re.search('mme -c', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL run_mme mme || true', '\$', 5)
mySSH.command('rm -f ' + self.SourceCodePath + '/scripts/my-mme.sh', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./stop_mme', '\$', 5)
time.sleep(5)
else:
logging.error('This should not happen!')
mySSH.close()
HTML.CreateHtmlTestRow('N/A', 'OK', CONST.ALL_PROCESSES_OK)
return True
def TerminateSPGW(self, HTML):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "killall --signal SIGINT oai_spgwc tshark"', '\$', 5)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "killall --signal SIGINT oai_spgwu tshark"', '\$', 5)
time.sleep(2)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "ps aux | grep oai_spgwc"', '\$', 5)
result = re.search('oai_spgwc -o -c ', mySSH.getBefore())
if result is not None:
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "killall --signal SIGKILL oai_spgwc"', '\$', 5)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "ps aux | grep oai_spgwu"', '\$', 5)
result = re.search('oai_spgwu -o -c ', mySSH.getBefore())
if result is not None:
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "killall --signal SIGKILL oai_spgwu"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT spgwc spgwu || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep spgw | grep -v grep', '\$', 5)
result = re.search('spgwc -c |spgwu -c ', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL spgwc spgwu || true', '\$', 5)
mySSH.command('rm -f ' + self.SourceCodePath + '/scripts/my-spgw*.sh', '\$', 5)
mySSH.command('stdbuf -o0 ps -aux | grep tshark | grep -v grep', '\$', 5)
result = re.search('-w ', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT tshark || true', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S chmod 666 ' + self.SourceCodePath + '/scripts/*.pcap', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT run_spgw spgw || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep spgw | grep -v grep', '\$', 5)
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL run_spgw spgw || true', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./stop_xGw', '\$', 5)
else:
logging.error('This should not happen!')
mySSH.close()
HTML.CreateHtmlTestRow('N/A', 'OK', CONST.ALL_PROCESSES_OK)
return True
def Terminate5GCN(self, HTML):
mySSH = cls_cmd.getConnection(self.IPAddress)
message = ''
if re.match('ltebox', self.Type, re.IGNORECASE):
logging.debug('Terminating SA BOX')
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./stop_sabox', '\$', 5)
time.sleep(1)
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('cd scripts', '\$', 5)
time.sleep(1)
mySSH.command('echo ' + self.Password + ' | sudo -S screen -S simulated_5g_hss -X quit', '\$', 5)
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
logging.debug('OAI CN5G Collecting Log files to workspace')
mySSH.command('echo ' + self.Password + ' | sudo rm -rf ' + self.SourceCodePath + '/logs', '\$', 5)
mySSH.command('mkdir ' + self.SourceCodePath + '/logs','\$', 5)
containers_list=['oai-smf','oai-spgwu','oai-amf','oai-nrf']
for c in containers_list:
mySSH.command('docker logs ' + c + ' > ' + self.SourceCodePath + '/logs/' + c + '.log', '\$', 5)
logging.debug('Terminating OAI CN5G')
mySSH.command(f'cd {self.SourceCodePath}/docker-compose', '\$', 5)
mySSH.command('python3 ./core-network.py '+self.cfgUnDeploy, '\$', 60)
mySSH.command('docker volume prune --force || true', '\$', 60)
time.sleep(2)
mySSH.command('tshark -r /tmp/oai-cn5g-v1.5.pcap | grep -E --colour=never "Tracking area update" ','\$', 30)
result = re.search('Tracking area update request', mySSH.getBefore())
if result is not None:
message = 'UE requested ' + str(mySSH.getBefore().count('Tracking area update request')) + ' Tracking area update request(s)'
else:
message = 'No Tracking area update request'
mySSH.run(f'cd {self.SourceCodePath}/logs && zip -r -qq test_logs_CN.zip *.log')
mySSH.copyin(f'{self.SourceCodePath}/logs/test_logs_CN.zip','test_logs_CN.zip')
logging.debug(message)
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
cn = cls_module.Module_UE(self.cnID)
succeeded, report = OC.OC_undeploy_CN(mySSH, self.OCUserName, self.OCPassword, cn.getNamespace(), cn.getCNPath())
if not succeeded:
HTML.CreateHtmlTestRow('N/A', 'KO', report)
HTML.CreateHtmlTabFooter(False)
logging.error("OC OAI CN5G: CN undeployment failed!")
return False
else:
message = report.stdout
else:
logging.error('This should not happen!')
mySSH.close()
HTML.CreateHtmlTestRowQueue(self.Type, 'OK', [message])
return True
def DeployEpc(self, HTML):
logging.debug('Trying to deploy')
if not re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
HTML.CreateHtmlTestRow(self.Type, 'KO', CONST.INVALID_PARAMETER)
HTML.CreateHtmlTabFooter(False)
logging.error('Deploy not possible with this EPC type: ' + self.Type)
return False
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.SourceCodePath, self.Type)
logging.error('Insufficient EPC Parameters')
return False
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
mySSH.command('docker-compose --version', '\$', 5)
result = re.search('docker-compose version 1|Docker Compose version v2', mySSH.getBefore())
if result is None:
mySSH.close()
HTML.CreateHtmlTestRow(self.Type, 'KO', CONST.INVALID_PARAMETER)
HTML.CreateHtmlTabFooter(False)
logging.error('docker-compose not installed on ' + self.IPAddress)
return False
# Checking if it is a MAGMA deployment
self.isMagmaUsed = False
if os.path.isfile('./' + self.yamlPath + '/redis_extern.conf'):
self.isMagmaUsed = True
logging.debug('MAGMA MME is used!')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('if [ -d ' + self.SourceCodePath + '/logs ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/logs ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts ' + self.SourceCodePath + '/logs', '\$', 5)
mySSH.command('rm -f ' + self.SourceCodePath + '/*.log', '\$', 5)
# deploying and configuring the cassandra database
# container names and services are currently hard-coded.
# they could be recovered by:
# - docker-compose config --services
# - docker-compose config | grep container_name
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/docker-compose.yml', self.SourceCodePath + '/scripts')
if self.isMagmaUsed:
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/entrypoint.sh', self.SourceCodePath + '/scripts')
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/mme.conf', self.SourceCodePath + '/scripts')
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/mme_fd.sprint.conf', self.SourceCodePath + '/scripts')
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/redis_extern.conf', self.SourceCodePath + '/scripts')
mySSH.command('chmod a+x ' + self.SourceCodePath + '/scripts/entrypoint.sh', '\$', 5)
else:
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/entrypoint.sh', self.SourceCodePath + '/scripts')
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/mme.conf', self.SourceCodePath + '/scripts')
mySSH.command('chmod 775 entrypoint.sh', '\$', 60)
mySSH.command('wget --quiet --tries=3 --retry-connrefused https://raw.githubusercontent.com/OPENAIRINTERFACE/openair-hss/develop/src/hss_rel14/db/oai_db.cql', '\$', 30)
mySSH.command('docker-compose down -v', '\$', 60)
mySSH.command('docker-compose up -d db_init', '\$', 60)
# databases take time...
time.sleep(10)
cnt = 0
db_init_status = False
while (cnt < 10):
mySSH.command('docker logs prod-db-init', '\$', 5)
result = re.search('OK', mySSH.getBefore())
if result is not None:
cnt = 10
db_init_status = True
else:
time.sleep(5)
cnt += 1
mySSH.command('docker rm -f prod-db-init', '\$', 5)
if not db_init_status:
HTML.CreateHtmlTestRow(self.Type, 'KO', CONST.INVALID_PARAMETER)
HTML.CreateHtmlTabFooter(False)
logging.error('Cassandra DB deployment/configuration went wrong!')
return False
# deploying EPC cNFs
mySSH.command('docker-compose up -d oai_spgwu', '\$', 60)
if self.isMagmaUsed:
listOfContainers = 'prod-cassandra prod-oai-hss prod-magma-mme prod-oai-spgwc prod-oai-spgwu-tiny prod-redis'
expectedHealthyContainers = 6
else:
listOfContainers = 'prod-cassandra prod-oai-hss prod-oai-mme prod-oai-spgwc prod-oai-spgwu-tiny'
expectedHealthyContainers = 5
# Checking for additional services
mySSH.command('docker-compose config', '\$', 5)
configResponse = mySSH.getBefore()
if configResponse.count('trf_gen') == 1:
mySSH.command('docker-compose up -d trf_gen', '\$', 60)
listOfContainers += ' prod-trf-gen'
expectedHealthyContainers += 1
mySSH.command('docker-compose config | grep --colour=never image', '\$', 10)
html_cell = ''
listOfImages = mySSH.getBefore()
for imageLine in listOfImages.split('\\r\\n'):
res1 = re.search('image: (?P<name>[a-zA-Z0-9\-]+):(?P<tag>[a-zA-Z0-9\-]+)', str(imageLine))
res2 = re.search('cassandra|redis', str(imageLine))
if res1 is not None and res2 is None:
html_cell += res1.group('name') + ':' + res1.group('tag') + ' '
nbChars = len(res1.group('name')) + len(res1.group('tag')) + 2
while (nbChars < 32):
html_cell += ' '
nbChars += 1
mySSH.command('docker image inspect --format="Size = {{.Size}} bytes" ' + res1.group('name') + ':' + res1.group('tag'), '\$', 10)
res3 = re.search('Size *= *(?P<size>[0-9\-]*) *bytes', mySSH.getBefore())
if res3 is not None:
imageSize = int(res3.group('size'))
imageSize = int(imageSize/(1024*1024))
html_cell += str(imageSize) + ' MBytes '
mySSH.command('docker image inspect --format="Date = {{.Created}}" ' + res1.group('name') + ':' + res1.group('tag'), '\$', 10)
res4 = re.search('Date *= *(?P<date>[0-9\-]*)T', mySSH.getBefore())
if res4 is not None:
html_cell += '(' + res4.group('date') + ')'
html_cell += '\n'
# Checking if all are healthy
cnt = 0
while (cnt < 3):
mySSH.command('docker inspect --format=\'{{.State.Health.Status}}\' ' + listOfContainers, '\$', 10)
unhealthyNb = mySSH.getBefore().count('unhealthy')
healthyNb = mySSH.getBefore().count('healthy') - unhealthyNb
startingNb = mySSH.getBefore().count('starting')
if healthyNb == expectedHealthyContainers:
cnt = 10
else:
time.sleep(10)
cnt += 1
logging.debug(' -- ' + str(healthyNb) + ' healthy container(s)')
logging.debug(' -- ' + str(unhealthyNb) + ' unhealthy container(s)')
logging.debug(' -- ' + str(startingNb) + ' still starting container(s)')
if healthyNb == expectedHealthyContainers:
mySSH.command('docker exec -d prod-oai-hss /bin/bash -c "nohup tshark -i any -f \'port 9042 or port 3868\' -w /tmp/hss_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
if self.isMagmaUsed:
mySSH.command('docker exec -d prod-magma-mme /bin/bash -c "nohup tshark -i any -f \'port 3868 or port 2123 or port 36412\' -w /tmp/mme_check_run.pcap 2>&1 > /dev/null"', '\$', 10)
else:
mySSH.command('docker exec -d prod-oai-mme /bin/bash -c "nohup tshark -i any -f \'port 3868 or port 2123 or port 36412\' -w /tmp/mme_check_run.pcap 2>&1 > /dev/null"', '\$', 10)
mySSH.command('docker exec -d prod-oai-spgwc /bin/bash -c "nohup tshark -i any -f \'port 2123 or port 8805\' -w /tmp/spgwc_check_run.pcap 2>&1 > /dev/null"', '\$', 10)
# On the SPGW-U, do not capture on the SGi interface to avoid a huge capture file
mySSH.command('docker exec -d prod-oai-spgwu-tiny /bin/bash -c "nohup tshark -i any -f \'port 8805\' -w /tmp/spgwu_check_run.pcap 2>&1 > /dev/null"', '\$', 10)
mySSH.close()
logging.debug('Deployment OK')
HTML.CreateHtmlTestRowQueue(self.Type, 'OK', [html_cell])
return True
else:
mySSH.close()
logging.debug('Deployment went wrong')
HTML.CreateHtmlTestRowQueue(self.Type, 'KO', [html_cell])
return False
def UndeployEpc(self, HTML):
logging.debug('Trying to undeploy')
# No check is done here; we assume everything was deployed beforehand.
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
# Checking if it is a MAGMA deployment.
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('docker-compose ps -a', '\$', 5)
self.isMagmaUsed = False
result = re.search('magma', mySSH.getBefore())
if result is not None:
self.isMagmaUsed = True
logging.debug('MAGMA MME is used!')
# Recovering logs and pcap files
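# SIGINT lets the in-container tshark processes terminate cleanly and flush their
# pcap files before they are copied out with 'docker cp' below.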
mySSH.command('cd ' + self.SourceCodePath + '/logs', '\$', 5)
mySSH.command('docker exec -it prod-oai-hss /bin/bash -c "killall --signal SIGINT oai_hss tshark"', '\$', 5)
if self.isMagmaUsed:
mySSH.command('docker exec -it prod-magma-mme /bin/bash -c "killall --signal SIGINT tshark"', '\$', 5)
else:
mySSH.command('docker exec -it prod-oai-mme /bin/bash -c "killall --signal SIGINT tshark"', '\$', 5)
mySSH.command('docker exec -it prod-oai-spgwc /bin/bash -c "killall --signal SIGINT oai_spgwc tshark"', '\$', 5)
mySSH.command('docker exec -it prod-oai-spgwu-tiny /bin/bash -c "killall --signal SIGINT tshark"', '\$', 5)
mySSH.command('docker logs prod-oai-hss > hss_' + self.testCase_id + '.log', '\$', 5)
if self.isMagmaUsed:
mySSH.command('docker cp --follow-link prod-magma-mme:/var/log/mme.log mme_' + self.testCase_id + '.log', '\$', 15)
else:
mySSH.command('docker logs prod-oai-mme > mme_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('docker logs prod-oai-spgwc > spgwc_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('docker logs prod-oai-spgwu-tiny > spgwu_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('docker cp prod-oai-hss:/tmp/hss_check_run.pcap hss_' + self.testCase_id + '.pcap', '\$', 60)
if self.isMagmaUsed:
mySSH.command('docker cp prod-magma-mme:/tmp/mme_check_run.pcap mme_' + self.testCase_id + '.pcap', '\$', 60)
else:
mySSH.command('docker cp prod-oai-mme:/tmp/mme_check_run.pcap mme_' + self.testCase_id + '.pcap', '\$', 60)
mySSH.command('tshark -r mme_' + self.testCase_id + '.pcap | grep -E --colour=never "Tracking area update"', '\$', 60)
result = re.search('Tracking area update request', mySSH.getBefore())
if result is not None:
message = 'UE requested ' + str(mySSH.getBefore().count('Tracking area update request')) + ' Tracking area update request(s)'
else:
message = 'No Tracking area update request'
logging.debug(message)
mySSH.command('docker cp prod-oai-spgwc:/tmp/spgwc_check_run.pcap spgwc_' + self.testCase_id + '.pcap', '\$', 60)
mySSH.command('docker cp prod-oai-spgwu-tiny:/tmp/spgwu_check_run.pcap spgwu_' + self.testCase_id + '.pcap', '\$', 60)
# Remove all
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
if self.isMagmaUsed:
listOfContainers = 'prod-cassandra prod-oai-hss prod-magma-mme prod-oai-spgwc prod-oai-spgwu-tiny prod-redis'
nbContainers = 6
else:
listOfContainers = 'prod-cassandra prod-oai-hss prod-oai-mme prod-oai-spgwc prod-oai-spgwu-tiny'
nbContainers = 5
# Checking for additional services
mySSH.command('docker-compose config', '\$', 5)
configResponse = mySSH.getBefore()
if configResponse.count('trf_gen') == 1:
listOfContainers += ' prod-trf-gen'
nbContainers += 1
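# 'docker-compose down -v' removes containers, networks and volumes; teardown is then
# verified by expecting 'No such object' from docker inspect for every container and network.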
mySSH.command('docker-compose down -v', '\$', 60)
mySSH.command('docker inspect --format=\'{{.State.Health.Status}}\' ' + listOfContainers, '\$', 10)
noMoreContainerNb = mySSH.getBefore().count('No such object')
mySSH.command('docker inspect --format=\'{{.Name}}\' prod-oai-public-net prod-oai-private-net', '\$', 10)
noMoreNetworkNb = mySSH.getBefore().count('No such object')
mySSH.close()
if noMoreContainerNb == nbContainers and noMoreNetworkNb == 2:
logging.debug('Undeployment OK')
HTML.CreateHtmlTestRowQueue(self.Type, 'OK', [message])
return True
else:
logging.debug('Undeployment went wrong')
HTML.CreateHtmlTestRowQueue(self.Type, 'KO', [message])
return False
def LogCollectHSS(self):
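# Collect HSS logs/pcaps; the exact commands depend on the EPC variant
# (docker-based Rel14, OAI CN5G, legacy OAI/CUPS, or the ltebox simulator).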
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('rm -f hss.log.zip', '\$', 5)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker inspect prod-oai-hss', '\$', 10)
result = re.search('No such object', mySSH.getBefore())
if result is not None:
mySSH.command('cd ../logs', '\$', 5)
mySSH.command('rm -f hss.log.zip', '\$', 5)
mySSH.command('zip hss.log.zip hss_*.*', '\$', 60)
mySSH.command('mv hss.log.zip ../scripts', '\$', 60)
else:
mySSH.command('docker cp ' + self.containerPrefix + '-oai-hss:/openair-hss/hss_check_run.log .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-hss:/tmp/hss_check_run.pcap .', '\$', 60)
mySSH.command('zip hss.log.zip hss_check_run.*', '\$', 60)
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
logging.debug('LogCollect is bypassed for that variant')
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
logging.debug('LogCollect is bypassed for that variant')
elif re.match('OAI', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('zip hss.log.zip hss*.log', '\$', 60)
mySSH.command('echo ' + self.Password + ' | sudo -S rm hss*.log', '\$', 5)
if re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('zip hss.log.zip logs/hss*.* *.pcap', '\$', 60)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f logs/hss*.* *.pcap', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cp /opt/hss_sim0609/hss.log .', '\$', 60)
mySSH.command('zip hss.log.zip hss.log', '\$', 60)
else:
logging.error('This option should not occur!')
mySSH.close()
def LogCollectMME(self):
if self.Type != 'OC-OAI-CN5G':
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('rm -f mme.log.zip', '\$', 5)
else:
mySSH = cls_cmd.getConnection(self.IPAddress)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker inspect prod-oai-mme', '\$', 10)
result = re.search('No such object', mySSH.getBefore())
if result is not None:
mySSH.command('cd ../logs', '\$', 5)
mySSH.command('rm -f mme.log.zip', '\$', 5)
mySSH.command('zip mme.log.zip mme_*.*', '\$', 60)
mySSH.command('mv mme.log.zip ../scripts', '\$', 60)
else:
mySSH.command('docker cp ' + self.containerPrefix + '-oai-mme:/openair-mme/mme_check_run.log .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-mme:/tmp/mme_check_run.pcap .', '\$', 60)
mySSH.command('zip mme.log.zip mme_check_run.*', '\$', 60)
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath + '/logs','\$', 5)
mySSH.command('cp -f /tmp/oai-cn5g-v1.5.pcap .','\$', 30)
mySSH.command('zip mme.log.zip oai-amf.log oai-nrf.log oai-cn5g*.pcap','\$', 30)
mySSH.command('mv mme.log.zip ' + self.SourceCodePath + '/scripts','\$', 30)
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
mySSH.run('cd ' + self.SourceCodePath + '/logs')
mySSH.run('zip mme.log.zip oai-amf.log oai-nrf.log oai-cn5g*.pcap')
mySSH.run('mv mme.log.zip ' + self.SourceCodePath + '/scripts')
elif re.match('OAI', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('zip mme.log.zip mme*.log', '\$', 60)
mySSH.command('echo ' + self.Password + ' | sudo -S rm mme*.log', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cp /opt/ltebox/var/log/*Log.0 .', '\$', 5)
mySSH.command('zip mme.log.zip mmeLog.0 s1apcLog.0 s1apsLog.0 s11cLog.0 libLog.0 s1apCodecLog.0 amfLog.0 ngapcLog.0 ngapcommonLog.0 ngapsLog.0', '\$', 60)
else:
logging.error('This option should not occur!')
mySSH.close()
def LogCollectSPGW(self):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('rm -f spgw.log.zip', '\$', 5)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker inspect prod-oai-mme', '\$', 10)
result = re.search('No such object', mySSH.getBefore())
if result is not None:
mySSH.command('cd ../logs', '\$', 5)
mySSH.command('rm -f spgw.log.zip', '\$', 5)
mySSH.command('zip spgw.log.zip spgw*.*', '\$', 60)
mySSH.command('mv spgw.log.zip ../scripts', '\$', 60)
else:
mySSH.command('docker cp ' + self.containerPrefix + '-oai-spgwc:/openair-spgwc/spgwc_check_run.log .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-spgwu-tiny:/openair-spgwu-tiny/spgwu_check_run.log .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-spgwc:/tmp/spgwc_check_run.pcap .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-spgwu-tiny:/tmp/spgwu_check_run.pcap .', '\$', 60)
mySSH.command('zip spgw.log.zip spgw*_check_run.*', '\$', 60)
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath + '/logs','\$', 5)
mySSH.command('zip spgw.log.zip oai-smf.log oai-spgwu.log','\$', 30)
mySSH.command('mv spgw.log.zip ' + self.SourceCodePath + '/scripts','\$', 30)
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath + '/logs','\$', 5)
mySSH.command('zip spgw.log.zip oai-smf.log oai-spgwu.log','\$', 30)
mySSH.command('mv spgw.log.zip ' + self.SourceCodePath + '/scripts','\$', 30)
elif re.match('OAI', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('zip spgw.log.zip spgw*.log', '\$', 60)
mySSH.command('echo ' + self.Password + ' | sudo -S rm spgw*.log', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cp /opt/ltebox/var/log/*Log.0 .', '\$', 5)
mySSH.command('zip spgw.log.zip xGwLog.0 upfLog.0', '\$', 60)
else:
logging.error('This option should not occur!')
mySSH.close()
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
# Python for CI of OAI-eNB + COTS-UE
#
# Required Python Version
# Python 3.x
#
# Required Python Package
# pexpect
#---------------------------------------------------------------------
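# (pexpect can typically be installed with: pip3 install pexpect)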
#-----------------------------------------------------------
# Functions Declaration
#-----------------------------------------------------------
def GenericHelp(vers):
print('----------------------------------------------------------------------------------------------------------------------')
print('main.py Ver: ' + vers)
print('----------------------------------------------------------------------------------------------------------------------')
print('python main.py [options]')
print(' --help Show this help.')
print(' --mode=[Mode]')
print(' TesteNB')
print(' InitiateHtml, FinalizeHtml')
print(' TerminateeNB, TerminateHSS, TerminateMME, TerminateSPGW')
print(' LogCollectBuild, LogCollecteNB, LogCollectHSS, LogCollectMME, LogCollectSPGW, LogCollectPing, LogCollectIperf')
print(' --local Force local execution: rewrites the test xml script before running to always execute on localhost. Assumes')
print(' images are available locally, will not remove any images and will run inside the current repo directory')
def GitSrvHelp(repository,branch,commit,mergeallow,targetbranch):
print(' --ranRepository=[OAI RAN Repository URL] -- ' + repository)
print(' --ranBranch=[OAI RAN Repository Branch] -- ' + branch)
print(' --ranCommitID=[OAI RAN Repository Commit SHA-1] -- ' + commit)
print(' --ranAllowMerge=[Allow Merge Request (with target branch) (true or false)] -- ' + mergeallow)
print(' --ranTargetBranch=[Target Branch in case of a Merge Request] -- ' + targetbranch)
def eNBSrvHelp(ipaddr, username, password, sourcepath):
print(' --eNBIPAddress=[eNB\'s IP Address] -- ' + ipaddr)
print(' --eNBUserName=[eNB\'s Login User Name] -- ' + username)
print(' --eNBPassword=[eNB\'s Login Password] -- ' + password)
print(' --eNBSourceCodePath=[eNB\'s Source Code Path] -- ' + sourcepath)
def OAIUESrvHelp(ipaddr, username, password, sourcepath):
print(' --UEIPAddress=[UE\'s IP Address] -- ' + ipaddr)
print(' --UEUserName=[UE\'s Login User Name] -- ' + username)
print(' --UEPassword=[UE\'s Login Password] -- ' + password)
print(' --UESourceCodePath=[UE\'s Source Code Path] -- ' + sourcepath)
def EPCSrvHelp(ipaddr, username, password, sourcepath, epctype):
print(' --EPCIPAddress=[EPC\'s IP Address] -- ' + ipaddr)
print(' --EPCUserName=[EPC\'s Login User Name] -- ' + username)
print(' --EPCPassword=[EPC\'s Login Password] -- ' + password)
print(' --EPCSourceCodePath=[EPC\'s Source Code Path] -- ' + sourcepath)
print(' --EPCType=[EPC\'s Type: OAI or ltebox or OC-OAI-CN5G] -- ' + epctype)
def XmlHelp(filename):
print(' --XMLTestFile=[XML Test File to be run] -- ' + filename)
print(' Note: multiple xml files can be specified (--XMLTestFile=File1 ... --XMLTestFile=FileN) when HTML headers are created ("InitiateHtml" mode)')
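# Example invocation (illustrative values only, not taken from any actual CI pipeline):
#   python main.py --mode=TesteNB --ranRepository=https://gitlab.eurecom.fr/oai/openairinterface5g.git \
#                  --ranBranch=develop --ranCommitID=0123456789abcdef --ranAllowMerge=false \
#                  --eNBIPAddress=192.168.18.194 --eNBUserName=oaici --eNBPassword=xxxx \
#                  --eNBSourceCodePath=/tmp/CI-eNB --EPCIPAddress=192.168.18.195 --EPCType=ltebox \
#                  --EPCUserName=oaici --EPCPassword=xxxx --EPCSourceCodePath=/tmp/CI-EPC \
#                  --XMLTestFile=xml_files/enb_usrp210_band7_50PRB.xml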
@@ -28,1605 +28,407 @@
# pexpect
#---------------------------------------------------------------------
#-----------------------------------------------------------
# Version
# Import Components
#-----------------------------------------------------------
Version = '0.1'
import helpreadme as HELP
import constants as CONST
import cls_oaicitest #main class for OAI CI test framework
import cls_containerize #class Containerize for all container-based operations on RAN/UE objects
import cls_static_code_analysis #class for static code analysis
import cls_physim1 #class PhySim for physical simulators deploy and run
import cls_cluster # class for building/deploying on cluster
import cls_native # class for all native/source-based operations
import epc
import ran
import cls_oai_html
#-----------------------------------------------------------
# Import
# Import Libs
#-----------------------------------------------------------
import sys # arg
import re # reg
import pexpect # pexpect
import time # sleep
import os
import subprocess
import xml.etree.ElementTree as ET
import logging
import datetime
import signal
from multiprocessing import Process, Lock, SimpleQueue
import traceback
logging.basicConfig(
level=logging.DEBUG,
stream=sys.stdout,
format="[%(asctime)s] %(levelname)8s: %(message)s"
)
#-----------------------------------------------------------
# Class Declaration
# General Functions
#-----------------------------------------------------------
class SSHConnection():
def __init__(self):
self.eNBIPAddress = ''
self.eNBRepository = ''
self.eNBBranch = ''
self.eNB_AllowMerge = False
self.eNBCommitID = ''
self.eNBUserName = ''
self.eNBPassword = ''
self.eNBSourceCodePath = ''
self.EPCIPAddress = ''
self.EPCUserName = ''
self.EPCPassword = ''
self.EPCSourceCodePath = ''
self.EPCType = ''
self.ADBIPAddress = ''
self.ADBUserName = ''
self.ADBPassword = ''
self.testCase_id = ''
self.testXMLfile = ''
self.desc = ''
self.Build_eNB_args = ''
self.Initialize_eNB_args = ''
self.ping_args = ''
self.ping_packetloss_threshold = ''
self.iperf_args = ''
self.iperf_packetloss_threshold = ''
self.iperf_profile = ''
self.UEDevices = []
self.UEIPAddresses = []
self.htmlFile = ''
self.htmlHeaderCreated = False
self.htmlFooterCreated = False
self.htmlUEConnected = 0
def open(self, ipaddress, username, password):
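# Opens an interactive ssh session through pexpect; the expect() index below maps to:
# 0 = first-connection host-key prompt, 1 = password prompt, 2 = already logged in
# (e.g. key-based authentication), 3/4 = EOF/timeout which abort the run.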
self.ssh = pexpect.spawn('ssh', [username + '@' + ipaddress], timeout = 5)
self.sshresponse = self.ssh.expect(['Are you sure you want to continue connecting (yes/no)?', 'password:', 'Last login', pexpect.EOF, pexpect.TIMEOUT])
if self.sshresponse == 0:
self.ssh.sendline('yes')
self.ssh.expect('password:')
self.ssh.sendline(password)
self.sshresponse = self.ssh.expect(['\$', 'Permission denied', 'password:', pexpect.EOF, pexpect.TIMEOUT])
if self.sshresponse == 0:
pass
else:
logging.debug('self.sshresponse = ' + str(self.sshresponse))
sys.exit('SSH Connection Failed')
elif self.sshresponse == 1:
self.ssh.sendline(password)
self.sshresponse = self.ssh.expect(['\$', 'Permission denied', 'password:', pexpect.EOF, pexpect.TIMEOUT])
if self.sshresponse == 0:
pass
else:
logging.debug('self.sshresponse = ' + str(self.sshresponse))
sys.exit('SSH Connection Failed')
elif self.sshresponse == 2:
# Checking if we are really on the remote client defined by its IP address
self.command('stdbuf -o0 ifconfig | egrep --color=never "inet addr:"', '\$', 5)
result = re.search(str(ipaddress), str(self.ssh.before))
if result is None:
sys.exit('SSH Connection Failed: TIMEOUT !!!')
pass
else:
# debug output
logging.debug(str(self.ssh.before))
logging.debug('self.sshresponse = ' + str(self.sshresponse))
sys.exit('SSH Connection Failed!!!')
def command(self, commandline, expectedline, timeout):
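# Sends one shell command and waits for expectedline; expect() index 0 means the
# pattern matched, 1/2 are unexpected EOF/timeout and abort the whole test run.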
logging.debug(commandline)
self.ssh.timeout = timeout
self.ssh.sendline(commandline)
self.sshresponse = self.ssh.expect([expectedline, pexpect.EOF, pexpect.TIMEOUT])
if self.sshresponse == 0:
pass
elif self.sshresponse == 1:
logging.debug('\u001B[1;37;41m Unexpected EOF \u001B[0m')
logging.debug('Expected Line : ' + expectedline)
sys.exit(self.sshresponse)
elif self.sshresponse == 2:
logging.debug('\u001B[1;37;41m Unexpected TIMEOUT \u001B[0m')
logging.debug('Expected Line : ' + expectedline)
sys.exit(self.sshresponse)
else:
logging.debug('\u001B[1;37;41m Unexpected Others \u001B[0m')
logging.debug('Expected Line : ' + expectedline)
sys.exit(self.sshresponse)
def close(self):
self.ssh.timeout = 5
self.ssh.sendline('exit')
self.sshresponse = self.ssh.expect([pexpect.EOF, pexpect.TIMEOUT])
if self.sshresponse == 0:
pass
elif self.sshresponse == 1:
logging.debug('\u001B[1;37;41m Unexpected TIMEOUT \u001B[0m')
else:
logging.debug('\u001B[1;37;41m Unexpected Others \u001B[0m')
def copy(self, ipaddress, username, password, source, destination):
logging.debug('scp '+ username + '@' + ipaddress + ':' + source + ' ' + destination)
scp_spawn = pexpect.spawn('scp '+ username + '@' + ipaddress + ':' + source + ' ' + destination, timeout = 5)
scp_response = scp_spawn.expect(['Are you sure you want to continue connecting (yes/no)?', 'password:', pexpect.EOF, pexpect.TIMEOUT])
if scp_response == 0:
scp_spawn.sendline('yes')
scp_spawn.expect('password:')
scp_spawn.sendline(password)
scp_response = scp_spawn.expect(['\$', 'Permission denied', 'password:', pexpect.EOF, pexpect.TIMEOUT])
if scp_response == 0:
pass
else:
logging.debug('1 - scp_response = ' + str(scp_response))
sys.exit('SCP failed')
elif scp_response == 1:
scp_spawn.sendline(password)
scp_response = scp_spawn.expect(['\$', 'Permission denied', 'password:', pexpect.EOF, pexpect.TIMEOUT])
if scp_response == 0 or scp_response == 3:
pass
else:
logging.debug('2 - scp_response = ' + str(scp_response))
sys.exit('SCP failed')
elif scp_response == 2:
pass
else:
logging.debug('3 - scp_response = ' + str(scp_response))
sys.exit('SCP failed')
def BuildeNB(self):
if self.eNBIPAddress == '' or self.eNBRepository == '' or self.eNBBranch == '' or self.eNBUserName == '' or self.eNBPassword == '' or self.eNBSourceCodePath == '':
Usage()
sys.exit('Insufficient Parameter')
self.open(self.eNBIPAddress, self.eNBUserName, self.eNBPassword)
self.command('mkdir -p ' + self.eNBSourceCodePath, '\$', 5)
self.command('cd ' + self.eNBSourceCodePath, '\$', 5)
self.command('if [ ! -e .git ]; then stdbuf -o0 git clone ' + self.eNBRepository + ' .; else stdbuf -o0 git fetch; fi', '\$', 600)
# Raphael: here add a check if git clone or git fetch went smoothly
self.command('git config user.email "jenkins@openairinterface.org"', '\$', 5)
self.command('git config user.name "OAI Jenkins"', '\$', 5)
self.command('echo ' + self.eNBPassword + ' | sudo -S git clean -x -d -ff', '\$', 30)
# if the commit ID is provided use it to point to it
if self.eNBCommitID != '':
self.command('git checkout -f ' + self.eNBCommitID, '\$', 5)
# if the branch is not develop, then it is a merge request and we need to do
# the potential merge. Note that merge conflicts should already have been checked earlier
if (self.eNB_AllowMerge):
if (self.eNBBranch != 'develop') and (self.eNBBranch != 'origin/develop'):
self.command('git merge --ff origin/develop -m "Temporary merge for CI"', '\$', 5)
self.command('source oaienv', '\$', 5)
self.command('cd cmake_targets', '\$', 5)
self.command('mkdir -p log', '\$', 5)
# no need to remove in log (git clean did the trick)
self.command('echo ' + self.eNBPassword + ' | sudo -S stdbuf -o0 ./build_oai ' + self.Build_eNB_args + ' 2>&1 | stdbuf -o0 tee -a compile_oai_enb.log', 'Bypassing the Tests', 600)
self.command('mkdir -p build_log_' + SSH.testCase_id, '\$', 5)
self.command('echo ' + self.eNBPassword + ' | sudo -S mv log/* ' + 'build_log_' + SSH.testCase_id, '\$', 5)
self.command('echo ' + self.eNBPassword + ' | sudo -S mv compile_oai_enb.log ' + 'build_log_' + SSH.testCase_id, '\$', 5)
self.close()
self.CreateHtmlTestRow(self.Build_eNB_args, 'OK', 0)
def InitializeHSS(self):
if self.EPCIPAddress == '' or self.EPCUserName == '' or self.EPCPassword == '' or self.EPCSourceCodePath == '' or self.EPCType == '':
Usage()
sys.exit('Insufficient Parameter')
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
if re.match('OAI', self.EPCType, re.IGNORECASE):
logging.debug('Using the OAI EPC HSS')
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('source oaienv', '\$', 5)
self.command('cd scripts', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S ./run_hss 2>&1 | stdbuf -o0 awk \'{ print strftime("[%Y/%m/%d %H:%M:%S] ",systime()) $0 }\' | stdbuf -o0 tee -a hss_' + SSH.testCase_id + '.log &', 'Core state: 2 -> 3', 35)
def CheckClassValidity(xml_class_list,action,id):
if action not in xml_class_list:
logging.error('test-case ' + id + ' has unlisted class ' + action + ' ##CHECK xml_class_list.yml')
resp=False
else:
resp=True
return resp
# assigning parameters to object instance attributes (even if the attributes do not exist!)
def AssignParams(params_dict):
for key,value in params_dict.items():
setattr(CiTestObj, key, value)
setattr(RAN, key, value)
setattr(HTML, key, value)
def ExecuteActionWithParam(action):
global EPC
global RAN
global HTML
global CONTAINERS
global SCA
global PHYSIM
global CLUSTER
if action == 'Build_eNB' or action == 'Build_Image' or action == 'Build_Proxy' or action == "Build_Cluster_Image" or action == "Build_Run_Tests":
RAN.Build_eNB_args=test.findtext('Build_eNB_args')
CONTAINERS.imageKind=test.findtext('kind')
forced_workspace_cleanup = test.findtext('forced_workspace_cleanup')
RAN.Build_eNB_forced_workspace_cleanup=False
CONTAINERS.forcedWorkspaceCleanup=False
CLUSTER.forcedWorkspaceCleanup = False
if forced_workspace_cleanup is not None and re.match('true', forced_workspace_cleanup, re.IGNORECASE):
RAN.Build_eNB_forced_workspace_cleanup = True
CONTAINERS.forcedWorkspaceCleanup = True
CLUSTER.forcedWorkspaceCleanup = True
eNB_instance=test.findtext('eNB_instance')
if (eNB_instance is None):
RAN.eNB_instance=0
CONTAINERS.eNB_instance=0
else:
logging.debug('Using the ltebox simulated HSS')
self.command('if [ -d ' + self.EPCSourceCodePath + '/scripts ]; then echo ' + self.eNBPassword + ' | sudo -S rm -Rf ' + self.EPCSourceCodePath + '/scripts ; fi', '\$', 5)
self.command('mkdir -p ' + self.EPCSourceCodePath + '/scripts', '\$', 5)
self.command('cd /opt/hss_sim0609', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S rm -f hss.log daemon.log', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S echo "Starting sudo session" && sudo daemon --unsafe --name=simulated_hss --chdir=/opt/hss_sim0609 ./starthss_real ', '\$', 5)
self.close()
self.CreateHtmlTestRow(self.EPCType, 'OK', 0)
def InitializeMME(self):
if self.EPCIPAddress == '' or self.EPCUserName == '' or self.EPCPassword == '' or self.EPCSourceCodePath == '' or self.EPCType == '':
Usage()
sys.exit('Insufficient Parameter')
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
if re.match('OAI', self.EPCType, re.IGNORECASE):
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('source oaienv', '\$', 5)
self.command('cd scripts', '\$', 5)
self.command('stdbuf -o0 hostname', '\$', 5)
result = re.search('hostname\\\\r\\\\n(?P<host_name>[a-zA-Z0-9\-\_]+)\\\\r\\\\n', str(self.ssh.before))
if result is None:
logging.debug('\u001B[1;37;41m Hostname Not Found! \u001B[0m')
sys.exit(1)
host_name = result.group('host_name')
self.command('echo ' + self.EPCPassword + ' | sudo -S ./run_mme 2>&1 | stdbuf -o0 tee -a mme_' + SSH.testCase_id + '.log &', 'MME app initialization complete', 100)
RAN.eNB_instance=int(eNB_instance)
CONTAINERS.eNB_instance=int(eNB_instance)
eNB_serverId=test.findtext('eNB_serverId')
if (eNB_serverId is None):
RAN.eNB_serverId[RAN.eNB_instance]='0'
CONTAINERS.eNB_serverId[RAN.eNB_instance]='0'
else:
self.command('cd /opt/ltebox/tools', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S ./start_mme', '\$', 5)
self.close()
self.CreateHtmlTestRow(self.EPCType, 'OK', 0)
def InitializeSPGW(self):
if self.EPCIPAddress == '' or self.EPCUserName == '' or self.EPCPassword == '' or self.EPCSourceCodePath == '' or self.EPCType == '':
Usage()
sys.exit('Insufficient Parameter')
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
if re.match('OAI', self.EPCType, re.IGNORECASE):
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('source oaienv', '\$', 5)
self.command('cd scripts', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S ./run_spgw 2>&1 | stdbuf -o0 tee -a spgw_' + SSH.testCase_id + '.log &', 'Initializing SPGW-APP task interface: DONE', 30)
RAN.eNB_serverId[RAN.eNB_instance]=eNB_serverId
CONTAINERS.eNB_serverId[CONTAINERS.eNB_instance]=eNB_serverId
xmlBgBuildField = test.findtext('backgroundBuild')
if (xmlBgBuildField is None):
RAN.backgroundBuild=False
else:
self.command('cd /opt/ltebox/tools', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S ./start_xGw', '\$', 5)
self.close()
self.CreateHtmlTestRow(self.EPCType, 'OK', 0)
def InitializeeNB(self):
if self.eNBIPAddress == '' or self.eNBUserName == '' or self.eNBPassword == '' or self.eNBSourceCodePath == '':
Usage()
sys.exit('Insufficient Parameter')
initialize_eNB_flag = True
pStatus = self.CheckProcessExist(initialize_eNB_flag)
if (pStatus < 0):
self.CreateHtmlTestRow(self.Initialize_eNB_args, 'KO', pStatus)
self.CreateHtmlFooter()
sys.exit(1)
self.open(self.eNBIPAddress, self.eNBUserName, self.eNBPassword)
self.command('cd ' + self.eNBSourceCodePath, '\$', 5)
# Initialize_eNB_args usually starts with -O followed by the configuration file location in the repository
full_config_file = self.Initialize_eNB_args.replace('-O ','')
extIdx = full_config_file.find('.conf')
if (extIdx > 0):
extra_options = full_config_file[extIdx + 5:]
full_config_file = full_config_file[:extIdx + 5]
config_path, config_file = os.path.split(full_config_file)
else:
sys.exit('Insufficient Parameter')
ci_full_config_file = config_path + '/ci-' + config_file
# Make a copy and adapt to EPC / eNB IP addresses
self.command('cp ' + full_config_file + ' ' + ci_full_config_file, '\$', 5)
self.command('sed -i -e \'s/mme_ip_address.*$/mme_ip_address = ( { ipv4 = "' + self.EPCIPAddress + '";/\' ' + ci_full_config_file, '\$', 2);
self.command('sed -i -e \'s/ENB_IPV4_ADDRESS_FOR_S1_MME.*$/ENB_IPV4_ADDRESS_FOR_S1_MME = "' + self.eNBIPAddress + '";/\' ' + ci_full_config_file, '\$', 2);
self.command('sed -i -e \'s/ENB_IPV4_ADDRESS_FOR_S1U.*$/ENB_IPV4_ADDRESS_FOR_S1U = "' + self.eNBIPAddress + '";/\' ' + ci_full_config_file, '\$', 2);
self.command('sed -i -e \'s/ENB_IPV4_ADDRESS_FOR_X2C.*$/ENB_IPV4_ADDRESS_FOR_X2C = "' + self.eNBIPAddress + '";/\' ' + ci_full_config_file, '\$', 2);
# Launch eNB with the modified config file
self.command('source oaienv', '\$', 5)
self.command('cd cmake_targets', '\$', 5)
self.command('echo "./lte_build_oai/build/lte-softmodem -O ' + self.eNBSourceCodePath + '/' + ci_full_config_file + extra_options + '" > ./my-lte-softmodem-run.sh ', '\$', 5)
self.command('chmod 775 ./my-lte-softmodem-run.sh ', '\$', 5)
self.command('echo ' + self.eNBPassword + ' | sudo -S rm -Rf enb_' + SSH.testCase_id + '.log', '\$', 5)
self.command('echo ' + self.eNBPassword + ' | sudo -S -E daemon --inherit --unsafe --name=enb_daemon --chdir=' + self.eNBSourceCodePath + '/cmake_targets -o ' + self.eNBSourceCodePath + '/cmake_targets/enb_' + SSH.testCase_id + '.log ./my-lte-softmodem-run.sh', '\$', 5)
time.sleep(6)
doLoop = True
loopCounter = 10
while (doLoop):
loopCounter = loopCounter - 1
if (loopCounter == 0):
doLoop = False
# Checking if process is still alive
#self.command('stdbuf -o0 ps -aux | grep -v grep | grep --color=never lte-softmodem', '\$', 5)
#result = re.search('lte-softmodem', str(self.ssh.before))
#if result is None:
# self.command('rsync -v enb_' + SSH.testCase_id + '.log enb_' + SSH.testCase_id + '.txt; stdbuf -o0 cat enb_' + SSH.testCase_id + '.log | egrep --color=never -i "segmentation fault"', '\$', 5)
# result = re.search('egmentation fault', str(self.ssh.before))
# logging.debug('\u001B[1;37;41m eNB process is already down \u001B[0m')
# if result is not None:
# logging.debug('\u001B[1;37;41m Segmentation fault \u001B[0m')
# logging.debug(str(self.ssh.before))
# self.CreateHtmlTestRow('-O ' + config_file + extra_options, 'KO', 0)
# self.CreateHtmlFooter()
# self.close()
# sys.exit(1)
logging.debug('\u001B[1;30;43m eNB logging system did not show "got sync"! Will check again later at UE attach \u001B[0m')
self.CreateHtmlTestRow('-O ' + config_file + extra_options, 'eNB not showing got sync!', 0)
# Not getting got sync is bypassed for the moment
#sys.exit(1)
else:
self.command('rsync -v enb_' + SSH.testCase_id + '.log enb_' + SSH.testCase_id + '.txt; stdbuf -o0 cat enb_' + SSH.testCase_id + '.log | grep --color=never -i sync', '\$', 4)
result = re.search('got sync', str(self.ssh.before))
if result is None:
time.sleep(6)
else:
doLoop = False
self.CreateHtmlTestRow('-O ' + config_file + extra_options, 'OK', 0)
logging.debug('\u001B[1m Initialize eNB Completed\u001B[0m')
self.command('rm -f enb_' + SSH.testCase_id + '.txt', '\$', 5)
self.close()
def InitializeUE_common(self, device_id):
logging.debug('send adb commands')
try:
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
# The following commands are deprecated since we no longer work on Android 7+
# self.command('stdbuf -o0 adb -s ' + device_id + ' shell settings put global airplane_mode_on 1', '\$', 10)
# self.command('stdbuf -o0 adb -s ' + device_id + ' shell am broadcast -a android.intent.action.AIRPLANE_MODE --ez state true', '\$', 60)
# a dedicated script has to be installed inside the UE
# airplane mode on means call /data/local/tmp/off
self.command('stdbuf -o0 adb -s ' + device_id + ' shell /data/local/tmp/off', '\$', 60)
# airplane mode off means call /data/local/tmp/on
logging.debug('\u001B[1mUE (' + device_id + ') Initialize Completed\u001B[0m')
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def InitializeUE(self):
if self.ADBIPAddress == '' or self.ADBUserName == '' or self.ADBPassword == '':
Usage()
sys.exit('Insufficient Parameter')
multi_jobs = []
for device_id in self.UEDevices:
p = Process(target = SSH.InitializeUE_common, args = (device_id,))
p.daemon = True
p.start()
multi_jobs.append(p)
for job in multi_jobs:
job.join()
self.CreateHtmlTestRow('N/A', 'OK', 0)
def AttachUE_common(self, device_id, statusQueue, lock):
try:
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
self.command('stdbuf -o0 adb -s ' + device_id + ' shell /data/local/tmp/on', '\$', 60)
time.sleep(2)
max_count = 45
count = max_count
while count > 0:
self.command('stdbuf -o0 adb -s ' + device_id + ' shell dumpsys telephony.registry | grep mDataConnectionState', '\$', 15)
result = re.search('mDataConnectionState.*=(?P<state>[0-9\-]+)', str(self.ssh.before))
if result is None:
logging.debug('\u001B[1;37;41m mDataConnectionState Not Found! \u001B[0m')
lock.acquire()
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put('mDataConnectionState Not Found!')
lock.release()
break
mDataConnectionState = int(result.group('state'))
if mDataConnectionState == 2:
logging.debug('\u001B[1mUE (' + device_id + ') Attach Completed\u001B[0m')
lock.acquire()
statusQueue.put(max_count - count)
statusQueue.put(device_id)
statusQueue.put('Attach Completed')
lock.release()
break
count = count - 1
if count == 15 or count == 30:
logging.debug('\u001B[1;37;43m Retry UE (' + device_id + ') Flight Mode Off \u001B[0m')
self.command('stdbuf -o0 adb -s ' + device_id + ' shell /data/local/tmp/off', '\$', 60)
time.sleep(0.5)
self.command('stdbuf -o0 adb -s ' + device_id + ' shell /data/local/tmp/on', '\$', 60)
time.sleep(0.5)
logging.debug('\u001B[1mWait UE (' + device_id + ') a second until mDataConnectionState=2 (' + str(max_count-count) + ' times)\u001B[0m')
time.sleep(1)
if count == 0:
logging.debug('\u001B[1;37;41m UE (' + device_id + ') Attach Failed \u001B[0m')
lock.acquire()
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put('Attach Failed')
lock.release()
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def AttachUE(self):
if self.ADBIPAddress == '' or self.ADBUserName == '' or self.ADBPassword == '':
Usage()
sys.exit('Insufficient Parameter')
initialize_eNB_flag = False
pStatus = self.CheckProcessExist(initialize_eNB_flag)
if (pStatus < 0):
self.CreateHtmlTestRow('N/A', 'KO', pStatus)
self.CreateHtmlFooter()
sys.exit(1)
multi_jobs = []
status_queue = SimpleQueue()
lock = Lock()
for device_id in self.UEDevices:
p = Process(target = SSH.AttachUE_common, args = (device_id, status_queue, lock,))
p.daemon = True
p.start()
multi_jobs.append(p)
for job in multi_jobs:
job.join()
if (status_queue.empty()):
self.CreateHtmlTestRow('N/A', 'KO', 0)
sys.exit(1)
if re.match('true', xmlBgBuildField, re.IGNORECASE):
RAN.backgroundBuild=True
else:
RAN.backgroundBuild=False
proxy_commit = test.findtext('proxy_commit')
if proxy_commit is not None:
CONTAINERS.proxyCommit = proxy_commit
if action == 'Build_eNB':
success = cls_native.Native.Build(HTML.testCase_id, HTML, RAN.eNBIPAddress, RAN.eNBSourceCodePath, RAN.Build_eNB_args)
elif action == 'Build_Image':
success = CONTAINERS.BuildImage(HTML)
elif action == 'Build_Proxy':
success = CONTAINERS.BuildProxy(HTML)
elif action == 'Build_Cluster_Image':
success = CLUSTER.BuildClusterImage(HTML)
elif action == 'Build_Run_Tests':
success = CONTAINERS.BuildRunTests(HTML)
elif action == 'Initialize_eNB':
datalog_rt_stats_file=test.findtext('rt_stats_cfg')
if datalog_rt_stats_file is None:
RAN.datalog_rt_stats_file='datalog_rt_stats.default.yaml'
else:
attach_status = True
html_queue = SimpleQueue()
while (not status_queue.empty()):
count = status_queue.get()
if (count < 0):
attach_status = False
device_id = status_queue.get()
message = status_queue.get()
if (count < 0):
html_cell = "<pre>UE (" + device_id + ")\n" + message + "</pre>"
else:
html_cell = "<pre>UE (" + device_id + ")\n" + message + ' in ' + str(count + 2) + ' seconds</pre>'
html_queue.put(html_cell)
if (attach_status):
self.CreateHtmlTestRowQueue('N/A', 'OK', len(self.UEDevices), html_queue)
else:
self.CreateHtmlTestRowQueue('N/A', 'KO', len(self.UEDevices), html_queue)
self.CreateHtmlFooter()
sys.exit(1)
def DetachUE_common(self, device_id):
try:
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
self.command('stdbuf -o0 adb -s ' + device_id + ' shell /data/local/tmp/off', '\$', 60)
logging.debug('\u001B[1mUE (' + device_id + ') Detach Completed\u001B[0m')
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def DetachUE(self):
if self.ADBIPAddress == '' or self.ADBUserName == '' or self.ADBPassword == '':
Usage()
sys.exit('Insufficient Parameter')
initialize_eNB_flag = False
pStatus = self.CheckProcessExist(initialize_eNB_flag)
if (pStatus < 0):
self.CreateHtmlTestRow('N/A', 'KO', pStatus)
self.CreateHtmlFooter()
sys.exit(1)
multi_jobs = []
for device_id in self.UEDevices:
p = Process(target = SSH.DetachUE_common, args = (device_id,))
p.daemon = True
p.start()
multi_jobs.append(p)
for job in multi_jobs:
job.join()
self.CreateHtmlTestRow('N/A', 'OK', 0)
def RebootUE_common(self, device_id):
try:
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
previousmDataConnectionStates = []
# Save mDataConnectionState
self.command('stdbuf -o0 adb -s ' + device_id + ' shell dumpsys telephony.registry | grep mDataConnectionState', '\$', 15)
self.command('stdbuf -o0 adb -s ' + device_id + ' shell dumpsys telephony.registry | grep mDataConnectionState', '\$', 15)
result = re.search('mDataConnectionState.*=(?P<state>[0-9\-]+)', str(self.ssh.before))
if result is None:
logging.debug('\u001B[1;37;41m mDataConnectionState Not Found! \u001B[0m')
sys.exit(1)
previousmDataConnectionStates.append(int(result.group('state')))
# Reboot UE
self.command('stdbuf -o0 adb -s ' + device_id + ' shell reboot', '\$', 10)
time.sleep(60)
previousmDataConnectionState = previousmDataConnectionStates.pop(0)
count = 180
while count > 0:
count = count - 1
self.command('stdbuf -o0 adb -s ' + device_id + ' shell dumpsys telephony.registry | grep mDataConnectionState', '\$', 15)
result = re.search('mDataConnectionState.*=(?P<state>[0-9\-]+)', str(self.ssh.before))
if result is None:
mDataConnectionState = None
else:
mDataConnectionState = int(result.group('state'))
logging.debug('mDataConnectionState = ' + result.group('state'))
if mDataConnectionState is None or (previousmDataConnectionState == 2 and mDataConnectionState != 2):
logging.debug('\u001B[1mWait UE (' + device_id + ') a second until reboot completion (' + str(180-count) + ' times)\u001B[0m')
time.sleep(1)
else:
logging.debug('\u001B[1mUE (' + device_id + ') Reboot Completed\u001B[0m')
break
if count == 0:
logging.debug('\u001B[1;37;41m UE (' + device_id + ') Reboot Failed \u001B[0m')
sys.exit(1)
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def RebootUE(self):
if self.ADBIPAddress == '' or self.ADBUserName == '' or self.ADBPassword == '':
Usage()
sys.exit('Insufficient Parameter')
initialize_eNB_flag = False
pStatus = self.CheckProcessExist(initialize_eNB_flag)
if (pStatus < 0):
self.CreateHtmlTestRow('N/A', 'KO', pStatus)
self.CreateHtmlFooter()
sys.exit(1)
multi_jobs = []
for device_id in self.UEDevices:
p = Process(target = SSH.RebootUE_common, args = (device_id,))
p.daemon = True
p.start()
multi_jobs.append(p)
for job in multi_jobs:
job.join()
self.CreateHtmlTestRow('N/A', 'OK', 0)
def GetAllUEDevices(self, terminate_ue_flag):
if self.ADBIPAddress == '' or self.ADBUserName == '' or self.ADBPassword == '':
Usage()
sys.exit('Insufficient Parameter')
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
self.command('adb devices', '\$', 15)
self.UEDevices = re.findall("\\\\r\\\\n([A-Za-z0-9]+)\\\\tdevice",str(self.ssh.before))
if terminate_ue_flag == False:
if len(self.UEDevices) == 0:
logging.debug('\u001B[1;37;41m UE Not Found! \u001B[0m')
sys.exit(1)
self.close()
def GetAllUEIPAddresses(self):
if self.ADBIPAddress == '' or self.ADBUserName == '' or self.ADBPassword == '':
Usage()
sys.exit('Insufficient Parameter')
self.UEIPAddresses = []
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
for device_id in self.UEDevices:
self.command('stdbuf -o0 adb -s ' + device_id + ' shell ip addr show | grep rmnet', '\$', 15)
result = re.search('inet (?P<ueipaddress>[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)\/[0-9]+[0-9a-zA-Z\.\s]+', str(self.ssh.before))
if result is None:
logging.debug('\u001B[1;37;41m UE IP Address Not Found! \u001B[0m')
sys.exit(1)
UE_IPAddress = result.group('ueipaddress')
logging.debug('\u001B[1mUE (' + device_id + ') IP Address is ' + UE_IPAddress + '\u001B[0m')
for ueipaddress in self.UEIPAddresses:
if ueipaddress == UE_IPAddress:
logging.debug('\u001B[1mUE (' + device_id + ') IP Address ' + UE_IPAddress + ' is already assigned to another device!' + '\u001B[0m')
sys.exit(1)
self.UEIPAddresses.append(UE_IPAddress)
self.close()
def Ping_common(self, lock, UE_IPAddress, device_id,statusQueue):
try:
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('cd scripts', '\$', 5)
ping_time = re.findall("-c (\d+)",str(self.ping_args))
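# the pexpect timeout is scaled to 1.5x the requested ping count (-c), giving ~50%
# margin over the nominal rate of one ICMP request per second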
self.command('stdbuf -o0 ping ' + self.ping_args + ' ' + UE_IPAddress + ' 2>&1 | stdbuf -o0 tee -a ping_' + SSH.testCase_id + '_' + device_id + '.log', '\$', int(ping_time[0])*1.5)
result = re.search(', (?P<packetloss>[0-9\.]+)% packet loss, time [0-9\.]+ms', str(self.ssh.before))
if result is None:
message = 'Packet Loss Not Found!'
logging.debug('\u001B[1;37;41m ' + message + ' \u001B[0m')
lock.acquire()
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(message)
lock.release()
return
packetloss = result.group('packetloss')
if float(packetloss) == 100:
message = 'Packet Loss is 100%'
logging.debug('\u001B[1;37;41m ' + message + ' \u001B[0m')
lock.acquire()
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(message)
lock.release()
return
result = re.search('rtt min\/avg\/max\/mdev = (?P<rtt_min>[0-9\.]+)\/(?P<rtt_avg>[0-9\.]+)\/(?P<rtt_max>[0-9\.]+)\/[0-9\.]+ ms', str(self.ssh.before))
if result is None:
message = 'Ping RTT_Min RTT_Avg RTT_Max Not Found!'
logging.debug('\u001B[1;37;41m ' + message + ' \u001B[0m')
lock.acquire()
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(message)
lock.release()
return
rtt_min = result.group('rtt_min')
rtt_avg = result.group('rtt_avg')
rtt_max = result.group('rtt_max')
pal_msg = 'Packet Loss : ' + packetloss + '%'
min_msg = 'RTT(Min) : ' + rtt_min + ' ms'
avg_msg = 'RTT(Avg) : ' + rtt_avg + ' ms'
max_msg = 'RTT(Max) : ' + rtt_max + ' ms'
lock.acquire()
logging.debug('\u001B[1;37;44m ping result (' + UE_IPAddress + ') \u001B[0m')
logging.debug('\u001B[1;34m ' + pal_msg + '\u001B[0m')
logging.debug('\u001B[1;34m ' + min_msg + '\u001B[0m')
logging.debug('\u001B[1;34m ' + avg_msg + '\u001B[0m')
logging.debug('\u001B[1;34m ' + max_msg + '\u001B[0m')
qMsg = pal_msg + '\n' + min_msg + '\n' + avg_msg + '\n' + max_msg
packetLossOK = True
if packetloss is not None:
if float(packetloss) > float(self.ping_packetloss_threshold):
qMsg += '\nPacket Loss too high'
logging.debug('\u001B[1;37;41m Packet Loss too high \u001B[0m')
packetLossOK = False
elif float(packetloss) > 0:
qMsg += '\nPacket Loss is not 0%'
logging.debug('\u001B[1;37;43m Packet Loss is not 0% \u001B[0m')
if (packetLossOK):
statusQueue.put(0)
else:
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(UE_IPAddress)
statusQueue.put(qMsg)
lock.release()
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def Ping(self):
if self.EPCIPAddress == '' or self.EPCUserName == '' or self.EPCPassword == '' or self.EPCSourceCodePath == '':
Usage()
sys.exit('Insufficient Parameter')
initialize_eNB_flag = False
pStatus = self.CheckProcessExist(initialize_eNB_flag)
if (pStatus < 0):
self.CreateHtmlTestRow(self.ping_args, 'KO', pStatus)
self.CreateHtmlFooter()
sys.exit(1)
self.GetAllUEIPAddresses()
multi_jobs = []
i = 0
lock = Lock()
status_queue = SimpleQueue()
for UE_IPAddress in self.UEIPAddresses:
device_id = self.UEDevices[i]
p = Process(target = SSH.Ping_common, args = (lock,UE_IPAddress,device_id,status_queue,))
p.daemon = True
p.start()
multi_jobs.append(p)
i = i + 1
for job in multi_jobs:
job.join()
if (status_queue.empty()):
self.CreateHtmlTestRow(self.ping_args, 'KO', 0)
self.CreateHtmlFooter()
sys.exit(1)
RAN.datalog_rt_stats_file=datalog_rt_stats_file
RAN.Initialize_eNB_args=test.findtext('Initialize_eNB_args')
eNB_instance=test.findtext('eNB_instance')
USRPIPAddress=test.findtext('USRP_IPAddress')
if USRPIPAddress is None:
RAN.USRPIPAddress=''
else:
ping_status = True
html_queue = SimpleQueue()
while (not status_queue.empty()):
count = status_queue.get()
if (count < 0):
ping_status = False
device_id = status_queue.get()
ip_addr = status_queue.get()
message = status_queue.get()
html_cell = "<pre>UE (" + device_id + ")\nIP Address : " + ip_addr + "\n" + message + "</pre>"
html_queue.put(html_cell)
if (ping_status):
self.CreateHtmlTestRowQueue(self.ping_args, 'OK', len(self.UEDevices), html_queue)
else:
self.CreateHtmlTestRowQueue(self.ping_args, 'KO', len(self.UEDevices), html_queue)
self.CreateHtmlFooter()
sys.exit(1)
def Iperf_ComputeTime(self):
result = re.search('-t (?P<iperf_time>\d+)', str(self.iperf_args))
if result is None:
logging.debug('\u001B[1;37;41m Iperf time Not Found! \u001B[0m')
sys.exit(1)
return result.group('iperf_time')
def Iperf_ComputeModifiedBW(self, idx, ue_num):
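# Rescales the requested iperf bandwidth (-b) according to the test profile:
# 'balanced' splits it evenly across UEs, 'single-ue' keeps it unchanged, and
# 'unbalanced' gives the first UE almost everything and every other UE a 2% residual.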
result = re.search('-b (?P<iperf_bandwidth>[0-9\.]+)[KMG]', str(self.iperf_args))
if result is None:
logging.debug('\u001B[1;37;41m Iperf bandwidth Not Found! \u001B[0m')
sys.exit(1)
iperf_bandwidth = result.group('iperf_bandwidth')
if SSH.iperf_profile == 'balanced':
iperf_bandwidth_new = float(iperf_bandwidth)/ue_num
if SSH.iperf_profile == 'single-ue':
iperf_bandwidth_new = float(iperf_bandwidth)
if SSH.iperf_profile == 'unbalanced':
# residual is 2% of max bw
residualBW = float(iperf_bandwidth) / 50
if idx == 0:
iperf_bandwidth_new = float(iperf_bandwidth) - ((ue_num - 1) * residualBW)
else:
iperf_bandwidth_new = residualBW
iperf_bandwidth_str = '-b ' + iperf_bandwidth
iperf_bandwidth_str_new = '-b ' + ('%.2f' % iperf_bandwidth_new)
result = re.sub(iperf_bandwidth_str, iperf_bandwidth_str_new, str(self.iperf_args))
if result is None:
logging.debug('\u001B[1;37;41m Calculate Iperf bandwidth Failed! \u001B[0m')
sys.exit(1)
return result
def Iperf_analyzeV2Output(self, lock, UE_IPAddress, device_id, statusQueue, iperf_real_options):
result = re.search('Server Report:', str(self.ssh.before))
if result is None:
result = re.search('read failed: Connection refused', str(self.ssh.before))
if result is not None:
logging.debug('\u001B[1;37;41m Could not connect to iperf server! \u001B[0m')
else:
logging.debug('\u001B[1;37;41m Server Report and Connection refused Not Found! \u001B[0m')
return -1
# Computing the requested bandwidth in float
result = re.search('-b (?P<iperf_bandwidth>[0-9\.]+)[KMG]', str(iperf_real_options))
if result is not None:
req_bandwidth = result.group('iperf_bandwidth')
req_bw = float(req_bandwidth)
result = re.search('-b [0-9\.]+K', str(iperf_real_options))
if result is not None:
req_bandwidth = '%.1f Kbits/sec' % req_bw
req_bw = req_bw * 1000
result = re.search('-b [0-9\.]+M', str(iperf_real_options))
if result is not None:
req_bandwidth = '%.1f Mbits/sec' % req_bw
req_bw = req_bw * 1000000
result = re.search('-b [0-9\.]+G', str(iperf_real_options))
if result is not None:
req_bandwidth = '%.1f Gbits/sec' % req_bw
req_bw = req_bw * 1000000000
result = re.search('Server Report:\\\\r\\\\n(?:|\[ *\d+\].*) (?P<bitrate>[0-9\.]+ [KMG]bits\/sec) +(?P<jitter>[0-9\.]+ ms) +(\d+\/..\d+) (\((?P<packetloss>[0-9\.]+)%\))', str(self.ssh.before))
if result is not None:
bitrate = result.group('bitrate')
packetloss = result.group('packetloss')
jitter = result.group('jitter')
lock.acquire()
logging.debug('\u001B[1;37;44m iperf result (' + UE_IPAddress + ') \u001B[0m')
iperfStatus = True
msg = 'Req Bitrate : ' + req_bandwidth + '\n'
logging.debug('\u001B[1;34m Req Bitrate : ' + req_bandwidth + '\u001B[0m')
if bitrate is not None:
msg += 'Bitrate : ' + bitrate + '\n'
logging.debug('\u001B[1;34m Bitrate : ' + bitrate + '\u001B[0m')
result = re.search('(?P<real_bw>[0-9\.]+) [KMG]bits/sec', str(bitrate))
if result is not None:
actual_bw = float(str(result.group('real_bw')))
result = re.search('[0-9\.]+ K', bitrate)
if result is not None:
actual_bw = actual_bw * 1000
result = re.search('[0-9\.]+ M', bitrate)
if result is not None:
actual_bw = actual_bw * 1000000
result = re.search('[0-9\.]+ G', bitrate)
if result is not None:
actual_bw = actual_bw * 1000000000
br_loss = 100 * actual_bw / req_bw
bitperf = '%.2f ' % br_loss
msg += 'Bitrate Perf: ' + bitperf + '%\n'
logging.debug('\u001B[1;34m Bitrate Perf: ' + bitperf + '%\u001B[0m')
if packetloss is not None:
msg += 'Packet Loss : ' + packetloss + '%\n'
logging.debug('\u001B[1;34m Packet Loss : ' + packetloss + '%\u001B[0m')
if float(packetloss) > float(self.iperf_packetloss_threshold):
msg += 'Packet Loss too high!\n'
logging.debug('\u001B[1;37;41m Packet Loss too high \u001B[0m')
iperfStatus = False
if jitter is not None:
msg += 'Jitter : ' + jitter + '\n'
logging.debug('\u001B[1;34m Jitter : ' + jitter + '\u001B[0m')
if (iperfStatus):
statusQueue.put(0)
else:
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(UE_IPAddress)
statusQueue.put(msg)
lock.release()
return 0
def Iperf_analyzeV2Server(self, lock, UE_IPAddress, device_id, statusQueue, iperf_real_options):
if (not os.path.isfile('iperf_server_' + SSH.testCase_id + '_' + device_id + '.log')):
lock.acquire()
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(UE_IPAddress)
statusQueue.put('Could not analyze from server log')
lock.release()
return
# Computing the requested bandwidth in float
result = re.search('-b (?P<iperf_bandwidth>[0-9\.]+)[KMG]', str(iperf_real_options))
if result is None:
logging.debug('Iperf bandwidth Not Found!')
lock.acquire()
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(UE_IPAddress)
statusQueue.put('Could not compute Iperf bandwidth!')
lock.release()
return
RAN.USRPIPAddress=USRPIPAddress
if (eNB_instance is None):
RAN.eNB_instance=0
else:
req_bandwidth = result.group('iperf_bandwidth')
req_bw = float(req_bandwidth)
result = re.search('-b [0-9\.]+K', str(iperf_real_options))
if result is not None:
req_bandwidth = '%.1f Kbits/sec' % req_bw
req_bw = req_bw * 1000
result = re.search('-b [0-9\.]+M', str(iperf_real_options))
if result is not None:
req_bandwidth = '%.1f Mbits/sec' % req_bw
req_bw = req_bw * 1000000
result = re.search('-b [0-9\.]+G', str(iperf_real_options))
if result is not None:
req_bandwidth = '%.1f Gbits/sec' % req_bw
req_bw = req_bw * 1000000000
server_file = open('iperf_server_' + SSH.testCase_id + '_' + device_id + '.log', 'r')
br_sum = 0.0
ji_sum = 0.0
pl_sum = 0
ps_sum = 0
row_idx = 0
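# each matching line in the server log is one 1-second interval report; bitrate and
# jitter are averaged over all intervals, packet loss is recomputed from the summed counters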
for line in server_file.readlines():
result = re.search('(?P<bitrate>[0-9\.]+ [KMG]bits\/sec) +(?P<jitter>[0-9\.]+ ms) +(?P<lostPack>[0-9]+)/ +(?P<sentPack>[0-9]+)', str(line))
if result is not None:
bitrate = result.group('bitrate')
jitter = result.group('jitter')
packetlost = result.group('lostPack')
packetsent = result.group('sentPack')
br = bitrate.split(' ')
ji = jitter.split(' ')
row_idx = row_idx + 1
curr_br = float(br[0])
pl_sum = pl_sum + int(packetlost)
ps_sum = ps_sum + int(packetsent)
if (br[1] == 'Kbits/sec'):
curr_br = curr_br * 1000
if (br[1] == 'Mbits/sec'):
curr_br = curr_br * 1000 * 1000
br_sum = curr_br + br_sum
ji_sum = float(ji[0]) + ji_sum
if (row_idx > 0):
br_sum = br_sum / row_idx
ji_sum = ji_sum / row_idx
br_loss = 100 * br_sum / req_bw
if (br_sum > 1000):
br_sum = br_sum / 1000
if (br_sum > 1000):
br_sum = br_sum / 1000
bitrate = '%.2f Mbits/sec' % br_sum
else:
bitrate = '%.2f Kbits/sec' % br_sum
else:
bitrate = '%.2f bits/sec' % br_sum
bitperf = '%.2f ' % br_loss
bitperf += '%'
jitter = '%.2f ms' % (ji_sum)
if (ps_sum > 0):
pl = float(100 * pl_sum / ps_sum)
packetloss = '%2.1f ' % (pl)
packetloss += '%'
else:
packetloss = 'unknown'
lock.acquire()
if (br_loss < 90):
statusQueue.put(1)
else:
statusQueue.put(0)
statusQueue.put(device_id)
statusQueue.put(UE_IPAddress)
req_msg = 'Req Bitrate : ' + req_bandwidth
bir_msg = 'Bitrate : ' + bitrate
brl_msg = 'Bitrate Perf: ' + bitperf
jit_msg = 'Jitter : ' + jitter
pal_msg = 'Packet Loss : ' + packetloss
statusQueue.put(req_msg + '\n' + bir_msg + '\n' + brl_msg + '\n' + jit_msg + '\n' + pal_msg + '\n')
logging.debug('\u001B[1;37;45m iperf result (' + UE_IPAddress + ') \u001B[0m')
logging.debug('\u001B[1;35m ' + req_msg + '\u001B[0m')
logging.debug('\u001B[1;35m ' + bir_msg + '\u001B[0m')
logging.debug('\u001B[1;35m ' + brl_msg + '\u001B[0m')
logging.debug('\u001B[1;35m ' + jit_msg + '\u001B[0m')
logging.debug('\u001B[1;35m ' + pal_msg + '\u001B[0m')
lock.release()
RAN.eNB_instance=int(eNB_instance)
eNB_serverId=test.findtext('eNB_serverId')
if (eNB_serverId is None):
RAN.eNB_serverId[RAN.eNB_instance]='0'
else:
lock.acquire()
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(UE_IPAddress)
statusQueue.put('Could not analyze from server log')
lock.release()
server_file.close()
def Iperf_analyzeV3Output(self, lock, UE_IPAddress, device_id, statusQueue):
result = re.search('(?P<bitrate>[0-9\.]+ [KMG]bits\/sec) +(?:|[0-9\.]+ ms +\d+\/\d+ \((?P<packetloss>[0-9\.]+)%\)) +(?:|receiver)\\\\r\\\\n(?:|\[ *\d+\] Sent \d+ datagrams)\\\\r\\\\niperf Done\.', str(self.ssh.before))
if result is None:
result = re.search('(?P<error>iperf: error - [a-zA-Z0-9 :]+)', str(self.ssh.before))
lock.acquire()
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(UE_IPAddress)
if result is not None:
logging.debug('\u001B[1;37;41m ' + result.group('error') + ' \u001B[0m')
statusQueue.put(result.group('error'))
else:
logging.debug('\u001B[1;37;41m Bitrate and/or Packet Loss Not Found! \u001B[0m')
statusQueue.put('Bitrate and/or Packet Loss Not Found!')
lock.release()
return
bitrate = result.group('bitrate')
packetloss = result.group('packetloss')
lock.acquire()
logging.debug('\u001B[1;37;44m iperf result (' + UE_IPAddress + ') \u001B[0m')
logging.debug('\u001B[1;34m Bitrate : ' + bitrate + '\u001B[0m')
msg = 'Bitrate : ' + bitrate + '\n'
iperfStatus = True
if packetloss is not None:
logging.debug('\u001B[1;34m Packet Loss : ' + packetloss + '%\u001B[0m')
msg += 'Packet Loss : ' + packetloss + '%\n'
if float(packetloss) > float(self.iperf_packetloss_threshold):
logging.debug('\u001B[1;37;41m Packet Loss too high \u001B[0m')
msg += 'Packet Loss too high!\n'
iperfStatus = False
if (iperfStatus):
statusQueue.put(0)
RAN.eNB_serverId[RAN.eNB_instance]=eNB_serverId
#local variable air_interface
air_interface = test.findtext('air_interface')
if (air_interface is None) or (air_interface.lower() not in ['nr','lte']):
RAN.air_interface[RAN.eNB_instance] = 'lte-softmodem'
else:
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(UE_IPAddress)
lock.release()
def Iperf_UL_common(self, lock, UE_IPAddress, device_id, idx, ue_num, statusQueue):
ipnumbers = UE_IPAddress.split('.')
if (len(ipnumbers) == 4):
ipnumbers[3] = '1'
EPC_Iperf_UE_IPAddress = ipnumbers[0] + '.' + ipnumbers[1] + '.' + ipnumbers[2] + '.' + ipnumbers[3]
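# assumption: the EPC-side endpoint reachable from the UE is the .1 address of the
# UE's /24 subnet (typically the SGI/gateway address allocated by the SPGW)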
# Launch iperf server on EPC side
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('cd ' + self.EPCSourceCodePath + '/scripts', '\$', 5)
self.command('rm -f iperf_server_' + SSH.testCase_id + '_' + device_id + '.log', '\$', 5)
port = 5001 + idx
self.command('echo $USER; nohup iperf -u -s -i 1 -p ' + str(port) + ' > iperf_server_' + SSH.testCase_id + '_' + device_id + '.log &', self.EPCUserName, 5)
time.sleep(0.5)
self.close()
# Launch iperf client on UE
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
self.command('cd ' + self.EPCSourceCodePath + '/scripts', '\$', 5)
iperf_time = self.Iperf_ComputeTime()
time.sleep(0.5)
modified_options = self.Iperf_ComputeModifiedBW(idx, ue_num)
modified_options = modified_options.replace('-R','')
time.sleep(0.5)
self.command('rm -f iperf_' + SSH.testCase_id + '_' + device_id + '.log', '\$', 5)
self.command('stdbuf -o0 adb -s ' + device_id + ' shell "/data/local/tmp/iperf -c ' + EPC_Iperf_UE_IPAddress + ' ' + modified_options + ' -p ' + str(port) + '" 2>&1 | stdbuf -o0 tee -a iperf_' + SSH.testCase_id + '_' + device_id + '.log', '\$', int(iperf_time)*5.0)
clientStatus = self.Iperf_analyzeV2Output(lock, UE_IPAddress, device_id, statusQueue, modified_options)
# Launch iperf server on EPC side
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('killall --signal SIGKILL iperf', self.EPCUserName, 5)
self.close()
if (clientStatus == -1):
time.sleep(1)
if (os.path.isfile('iperf_server_' + SSH.testCase_id + '_' + device_id + '.log')):
os.remove('iperf_server_' + SSH.testCase_id + '_' + device_id + '.log')
self.copy(self.EPCIPAddress, self.EPCUserName, self.EPCPassword, self.EPCSourceCodePath + '/scripts/iperf_server_' + SSH.testCase_id + '_' + device_id + '.log', '.')
self.Iperf_analyzeV2Server(lock, UE_IPAddress, device_id, statusQueue, modified_options)
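# Illustrative sketch (hypothetical helper, not part of the CI scripts): Iperf_UL_common
# derives the EPC-side iperf endpoint by reusing the UE subnet with host .1 and gives
# each UE its own server port (5001 + index) so parallel uplink runs do not collide.
def epc_iperf_endpoint(ue_ip, idx):
    octets = ue_ip.split('.')
    if len(octets) == 4:
        octets[3] = '1'  # assumption: the EPC/SPGW side answers on host .1 of the UE subnet
    return '.'.join(octets), 5001 + idx
# epc_iperf_endpoint('192.168.61.35', 2) -> ('192.168.61.1', 5003)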
def Iperf_common(self, lock, UE_IPAddress, device_id, idx, ue_num, statusQueue):
try:
# Single-UE profile -- iperf only on one UE
if SSH.iperf_profile == 'single-ue' and idx != 0:
return
useIperf3 = False
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
# if by chance ADB server and EPC are on the same remote host, at least log collection will take care of it
self.command('if [ ! -d ' + self.EPCSourceCodePath + '/scripts ]; then mkdir -p ' + self.EPCSourceCodePath + '/scripts ; fi', '\$', 5)
self.command('cd ' + self.EPCSourceCodePath + '/scripts', '\$', 5)
# Checking if iperf / iperf3 are installed
self.command('adb -s ' + device_id + ' shell "ls /data/local/tmp"', '\$', 5)
result = re.search('iperf3', str(self.ssh.before))
if result is None:
result = re.search('iperf', str(self.ssh.before))
if result is None:
message = 'Neither iperf nor iperf3 installed on UE!'
lock.acquire()
logging.debug('\u001B[1;37;41m ' + message + ' \u001B[0m')
statusQueue.put(-1)
statusQueue.put(device_id)
statusQueue.put(UE_IPAddress)
statusQueue.put(message)
lock.release()
return
#sys.exit(1)
else:
useIperf3 = True
# in case of iperf, UL has its own function
if (not useIperf3):
result = re.search('-R', str(self.iperf_args))
if result is not None:
self.close()
self.Iperf_UL_common(lock, UE_IPAddress, device_id, idx, ue_num, statusQueue)
return
if (useIperf3):
self.command('stdbuf -o0 adb -s ' + device_id + ' shell /data/local/tmp/iperf3 -s &', '\$', 5)
else:
self.command('rm -f iperf_server_' + SSH.testCase_id + '_' + device_id + '.log', '\$', 5)
self.command('echo $USER; nohup adb -s ' + device_id + ' shell "/data/local/tmp/iperf -u -s -i 1" > iperf_server_' + SSH.testCase_id + '_' + device_id + '.log &', self.ADBUserName, 5)
time.sleep(0.5)
self.close()
RAN.air_interface[RAN.eNB_instance] = air_interface.lower() +'-softmodem'
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('cd ' + self.EPCSourceCodePath + '/scripts', '\$', 5)
iperf_time = self.Iperf_ComputeTime()
time.sleep(0.5)
cmd_prefix = test.findtext('cmd_prefix')
if cmd_prefix is not None: RAN.cmd_prefix = cmd_prefix
success = RAN.InitializeeNB(HTML, EPC)
modified_options = self.Iperf_ComputeModifiedBW(idx, ue_num)
time.sleep(0.5)
self.command('rm -f iperf_' + SSH.testCase_id + '_' + device_id + '.log', '\$', 5)
if (useIperf3):
self.command('stdbuf -o0 iperf3 -c ' + UE_IPAddress + ' ' + modified_options + ' 2>&1 | stdbuf -o0 tee -a iperf_' + SSH.testCase_id + '_' + device_id + '.log', '\$', int(iperf_time)*5.0)
clientStatus = 0
self.Iperf_analyzeV3Output(lock, UE_IPAddress, device_id, statusQueue)
else:
self.command('stdbuf -o0 iperf -c ' + UE_IPAddress + ' ' + modified_options + ' 2>&1 | stdbuf -o0 tee -a iperf_' + SSH.testCase_id + '_' + device_id + '.log', '\$', int(iperf_time)*5.0)
clientStatus = self.Iperf_analyzeV2Output(lock, UE_IPAddress, device_id, statusQueue, modified_options)
self.close()
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
self.command('stdbuf -o0 adb -s ' + device_id + ' shell ps | grep --color=never iperf | grep -v grep', '\$', 5)
result = re.search('shell +(?P<pid>\d+)', str(self.ssh.before))
if result is not None:
pid_iperf = result.group('pid')
self.command('stdbuf -o0 adb -s ' + device_id + ' shell kill -KILL ' + pid_iperf, '\$', 5)
self.close()
if (clientStatus == -1):
time.sleep(1)
if (os.path.isfile('iperf_server_' + SSH.testCase_id + '_' + device_id + '.log')):
os.remove('iperf_server_' + SSH.testCase_id + '_' + device_id + '.log')
self.copy(self.ADBIPAddress, self.ADBUserName, self.ADBPassword, self.EPCSourceCodePath + '/scripts/iperf_server_' + SSH.testCase_id + '_' + device_id + '.log', '.')
self.Iperf_analyzeV2Server(lock, UE_IPAddress, device_id, statusQueue, modified_options)
except:
os.kill(os.getppid(),signal.SIGUSR1)
def Iperf(self):
if self.EPCIPAddress == '' or self.EPCUserName == '' or self.EPCPassword == '' or self.EPCSourceCodePath == '' or self.ADBIPAddress == '' or self.ADBUserName == '' or self.ADBPassword == '':
Usage()
sys.exit('Insufficient Parameter')
initialize_eNB_flag = False
pStatus = self.CheckProcessExist(initialize_eNB_flag)
if (pStatus < 0):
self.CreateHtmlTestRow(self.iperf_args, 'KO', pStatus)
self.CreateHtmlFooter()
sys.exit(1)
self.GetAllUEIPAddresses()
multi_jobs = []
i = 0
ue_num = len(self.UEIPAddresses)
lock = Lock()
status_queue = SimpleQueue()
for UE_IPAddress in self.UEIPAddresses:
device_id = self.UEDevices[i]
p = Process(target = SSH.Iperf_common, args = (lock,UE_IPAddress,device_id,i,ue_num,status_queue,))
p.daemon = True
p.start()
multi_jobs.append(p)
i = i + 1
for job in multi_jobs:
job.join()
if (status_queue.empty()):
self.CreateHtmlTestRow(self.iperf_args, 'KO', 0)
self.CreateHtmlFooter()
sys.exit(1)
elif action == 'Terminate_eNB':
eNB_instance=test.findtext('eNB_instance')
if (eNB_instance is None):
RAN.eNB_instance=0
else:
iperf_status = True
iperf_noperf = False
html_queue = SimpleQueue()
while (not status_queue.empty()):
count = status_queue.get()
if (count < 0):
iperf_status = False
if (count > 0):
iperf_noperf = True
device_id = status_queue.get()
ip_addr = status_queue.get()
message = status_queue.get()
html_cell = "<pre>UE (" + device_id + ")\nIP Address : " + ip_addr + "\n" + message + "</pre>"
html_queue.put(html_cell)
if (iperf_noperf and iperf_status):
self.CreateHtmlTestRowQueue(self.iperf_args, 'PERF NOT MET', len(self.UEDevices), html_queue)
elif (iperf_status):
self.CreateHtmlTestRowQueue(self.iperf_args, 'OK', len(self.UEDevices), html_queue)
else:
self.CreateHtmlTestRowQueue(self.iperf_args, 'KO', len(self.UEDevices), html_queue)
self.CreateHtmlFooter()
sys.exit(1)
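# Illustrative sketch (not part of the CI scripts): every per-UE worker pushes exactly
# four items onto the shared queue -- status code, device id, IP address, message -- and
# the parent drains them in the same order (see the while loop above) to build one HTML
# cell per UE.
def drain_status_queue(q):
    cells = []
    while not q.empty():
        status = q.get()
        device_id = q.get()
        ip_addr = q.get()
        message = q.get()
        cells.append((status >= 0, '<pre>UE (' + device_id + ')\nIP Address : ' + ip_addr + '\n' + message + '</pre>'))
    return cells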
def CheckProcessExist(self, initialize_eNB_flag):
multi_jobs = []
status_queue = SimpleQueue()
p = Process(target = SSH.CheckHSSProcess, args = (status_queue,))
p.daemon = True
p.start()
multi_jobs.append(p)
p = Process(target = SSH.CheckMMEProcess, args = (status_queue,))
p.daemon = True
p.start()
multi_jobs.append(p)
p = Process(target = SSH.CheckSPGWProcess, args = (status_queue,))
p.daemon = True
p.start()
multi_jobs.append(p)
if initialize_eNB_flag == False:
p = Process(target = SSH.CheckeNBProcess, args = (status_queue,))
p.daemon = True
p.start()
multi_jobs.append(p)
for job in multi_jobs:
job.join()
if (status_queue.empty()):
return -15
RAN.eNB_instance=int(eNB_instance)
eNB_serverId=test.findtext('eNB_serverId')
if (eNB_serverId is None):
RAN.eNB_serverId[RAN.eNB_instance]='0'
else:
result = 0
while (not status_queue.empty()):
status = status_queue.get()
if (status < 0):
result = status
return result
def CheckeNBProcess(self, status_queue):
try:
self.open(self.eNBIPAddress, self.eNBUserName, self.eNBPassword)
self.command('stdbuf -o0 ps -aux | grep -v grep | grep --color=never lte-softmodem', '\$', 5)
result = re.search('lte-softmodem', str(self.ssh.before))
if result is None:
logging.debug('\u001B[1;37;41m eNB Process Not Found! \u001B[0m')
#sys.exit(1)
status_queue.put(-1)
else:
status_queue.put(1)
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def CheckHSSProcess(self, status_queue):
try:
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('stdbuf -o0 ps -aux | grep -v grep | grep --color=never hss', '\$', 5)
if re.match('OAI', self.EPCType, re.IGNORECASE):
result = re.search('\/bin\/bash .\/run_', str(self.ssh.before))
else:
result = re.search('hss_sim s6as diam_hss', str(self.ssh.before))
if result is None:
logging.debug('\u001B[1;37;41m HSS Process Not Found! \u001B[0m')
status_queue.put(-2)
#sys.exit(1)
else:
status_queue.put(2)
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def CheckMMEProcess(self, status_queue):
try:
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('stdbuf -o0 ps -aux | grep -v grep | grep --color=never mme', '\$', 5)
if re.match('OAI', self.EPCType, re.IGNORECASE):
result = re.search('\/bin\/bash .\/run_', str(self.ssh.before))
else:
result = re.search('mme', str(self.ssh.before))
if result is None:
logging.debug('\u001B[1;37;41m MME Process Not Found! \u001B[0m')
status_queue.put(-3)
#sys.exit(1)
else:
status_queue.put(3)
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def CheckSPGWProcess(self, status_queue):
try:
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
if re.match('OAI', self.EPCType, re.IGNORECASE):
self.command('stdbuf -o0 ps -aux | grep -v grep | grep --color=never spgw', '\$', 5)
result = re.search('\/bin\/bash .\/run_', str(self.ssh.before))
else:
self.command('stdbuf -o0 ps -aux | grep -v grep | grep --color=never xGw', '\$', 5)
result = re.search('xGw', str(self.ssh.before))
if result is None:
logging.debug('\u001B[1;37;41m SPGW Process Not Found! \u001B[0m')
status_queue.put(-4)
#sys.exit(1)
else:
status_queue.put(4)
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
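# Illustrative summary (codes as used above, not part of the CI scripts): each checker
# reports a positive value when its process is found and a negative value when it is
# missing; CheckProcessExist keeps the last negative value seen and CreateHtmlTestRow
# maps it to a label.
PROCESS_CODES = {
    1: 'eNB found',  -1: 'eNB process not found',
    2: 'HSS found',  -2: 'HSS process not found',
    3: 'MME found',  -3: 'MME process not found',
    4: 'SPGW found', -4: 'SPGW process not found',
}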
def TerminateeNB(self):
self.open(self.eNBIPAddress, self.eNBUserName, self.eNBPassword)
self.command('cd ' + self.eNBSourceCodePath + '/cmake_targets', '\$', 5)
self.command('echo ' + self.eNBPassword + ' | sudo -S daemon --name=enb_daemon --stop', '\$', 5)
self.command('rm -f my-lte-softmodem-run.sh', '\$', 5)
self.command('echo ' + self.eNBPassword + ' | sudo -S killall --signal SIGINT lte-softmodem || true', '\$', 5)
time.sleep(5)
self.command('stdbuf -o0 ps -aux | grep -v grep | grep lte-softmodem', '\$', 5)
result = re.search('lte-softmodem', str(self.ssh.before))
if result is not None:
self.command('echo ' + self.eNBPassword + ' | sudo -S killall --signal SIGKILL lte-softmodem || true', '\$', 5)
self.close()
self.CreateHtmlTestRow('N/A', 'OK', 0)
def TerminateHSS(self):
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
if re.match('OAI', self.EPCType, re.IGNORECASE):
self.command('echo ' + self.EPCPassword + ' | sudo -S killall --signal SIGINT run_hss oai_hss || true', '\$', 5)
time.sleep(2)
self.command('stdbuf -o0 ps -aux | grep -v grep | grep hss', '\$', 5)
result = re.search('\/bin\/bash .\/run_', str(self.ssh.before))
if result is not None:
self.command('echo ' + self.EPCPassword + ' | sudo -S killall --signal SIGKILL run_hss oai_hss || true', '\$', 5)
RAN.eNB_serverId[RAN.eNB_instance]=eNB_serverId
#retx checkers
string_field=test.findtext('d_retx_th')
if (string_field is not None):
RAN.ran_checkers['d_retx_th'] = [float(x) for x in string_field.split(',')]
string_field=test.findtext('u_retx_th')
if (string_field is not None):
RAN.ran_checkers['u_retx_th'] = [float(x) for x in string_field.split(',')]
#local variable air_interface
air_interface = test.findtext('air_interface')
if (air_interface is None) or (air_interface.lower() not in ['nr','lte']):
RAN.air_interface[RAN.eNB_instance] = 'lte-softmodem'
else:
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('cd scripts', '\$', 5)
self.command('rm -f ./kill_hss.sh', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S daemon --name=simulated_hss --stop', '\$', 5)
time.sleep(2)
self.command('ps -aux | egrep --color=never "hss_sim|simulated_hss" | grep -v grep | awk \'BEGIN{n=0}{pidId[n]=$2;n=n+1}END{print "kill -9 " pidId[0] " " pidId[1]}\' > ./kill_hss.sh', '\$', 5)
self.command('chmod 755 ./kill_hss.sh', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S ./kill_hss.sh', '\$', 5)
self.command('rm ./kill_hss.sh', '\$', 5)
self.close()
self.CreateHtmlTestRow('N/A', 'OK', 0)
def TerminateMME(self):
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
if re.match('OAI', self.EPCType, re.IGNORECASE):
self.command('echo ' + self.EPCPassword + ' | sudo -S killall --signal SIGINT run_mme mme || true', '\$', 5)
time.sleep(2)
self.command('stdbuf -o0 ps -aux | grep -v grep | grep mme', '\$', 5)
result = re.search('\/bin\/bash .\/run_', str(self.ssh.before))
if result is not None:
self.command('echo ' + self.EPCPassword + ' | sudo -S killall --signal SIGKILL run_mme mme || true', '\$', 5)
RAN.air_interface[RAN.eNB_instance] = air_interface.lower() +'-softmodem'
success = RAN.TerminateeNB(HTML, EPC)
elif action == 'Initialize_UE' or action == 'Attach_UE' or action == 'Detach_UE' or action == 'Terminate_UE' or action == 'CheckStatusUE' or action == 'DataEnable_UE' or action == 'DataDisable_UE':
CiTestObj.ue_ids = test.findtext('id').split(' ')
if force_local:
# Change all execution targets to localhost
CiTestObj.nodes = ['localhost'] * len(CiTestObj.ue_ids)
else:
self.command('cd /opt/ltebox/tools', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S ./stop_mme', '\$', 5)
self.close()
self.CreateHtmlTestRow('N/A', 'OK', 0)
def TerminateSPGW(self):
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
if re.match('OAI', self.EPCType, re.IGNORECASE):
self.command('echo ' + self.EPCPassword + ' | sudo -S killall --signal SIGINT run_spgw spgw || true', '\$', 5)
time.sleep(2)
self.command('stdbuf -o0 ps -aux | grep -v grep | grep spgw', '\$', 5)
result = re.search('\/bin\/bash .\/run_', str(self.ssh.before))
if result is not None:
self.command('echo ' + self.EPCPassword + ' | sudo -S killall --signal SIGKILL run_spgw spgw || true', '\$', 5)
if test.findtext('nodes'):
CiTestObj.nodes = test.findtext('nodes').split(' ')
if len(CiTestObj.ue_ids) != len(CiTestObj.nodes):
logging.error('Number of Nodes is not equal to the total number of UEs')
sys.exit("Mismatch in number of Nodes and UEs")
else:
CiTestObj.nodes = [None] * len(CiTestObj.ue_ids)
if action == 'Initialize_UE':
success = CiTestObj.InitializeUE(HTML)
elif action == 'Attach_UE':
success = CiTestObj.AttachUE(HTML)
elif action == 'Detach_UE':
success = CiTestObj.DetachUE(HTML)
elif action == 'Terminate_UE':
success = CiTestObj.TerminateUE(HTML)
elif action == 'CheckStatusUE':
success = CiTestObj.CheckStatusUE(HTML)
elif action == 'DataEnable_UE':
success = CiTestObj.DataEnableUE(HTML)
elif action == 'DataDisable_UE':
success = CiTestObj.DataDisableUE(HTML)
elif action == 'Ping':
CiTestObj.ping_args = test.findtext('ping_args')
CiTestObj.ping_packetloss_threshold = test.findtext('ping_packetloss_threshold')
CiTestObj.ue_ids = test.findtext('id').split(' ')
if force_local:
# Change all execution targets to localhost
CiTestObj.nodes = ['localhost'] * len(CiTestObj.ue_ids)
else:
self.command('cd /opt/ltebox/tools', '\$', 5)
self.command('echo ' + self.EPCPassword + ' | sudo -S ./stop_xGw', '\$', 5)
self.close()
self.CreateHtmlTestRow('N/A', 'OK', 0)
def TerminateUE_common(self, device_id):
try:
self.open(self.ADBIPAddress, self.ADBUserName, self.ADBPassword)
self.command('stdbuf -o0 adb -s ' + device_id + ' shell /data/local/tmp/off', '\$', 60)
logging.debug('\u001B[1mUE (' + device_id + ') Detach Completed\u001B[0m')
self.command('stdbuf -o0 adb -s ' + device_id + ' shell ps | grep --color=never iperf | grep -v grep', '\$', 5)
result = re.search('shell +(?P<pid>\d+)', str(self.ssh.before))
if result is not None:
pid_iperf = result.group('pid')
self.command('stdbuf -o0 adb -s ' + device_id + ' shell kill -KILL ' + pid_iperf, '\$', 5)
self.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def TerminateUE(self):
terminate_ue_flag = True
SSH.GetAllUEDevices(terminate_ue_flag)
multi_jobs = []
for device_id in self.UEDevices:
p = Process(target= SSH.TerminateUE_common, args = (device_id,))
p.daemon = True
p.start()
multi_jobs.append(p)
for job in multi_jobs:
job.join()
self.CreateHtmlTestRow('N/A', 'OK', 0)
def LogCollectBuild(self):
self.open(self.eNBIPAddress, self.eNBUserName, self.eNBPassword)
self.command('cd ' + self.eNBSourceCodePath, '\$', 5)
self.command('cd cmake_targets', '\$', 5)
self.command('rm -f build.log.zip', '\$', 5)
self.command('zip build.log.zip build_log_*/*', '\$', 60)
self.command('echo ' + self.eNBPassword + ' | sudo -S rm -rf build_log_*', '\$', 5)
self.close()
def LogCollecteNB(self):
self.open(self.eNBIPAddress, self.eNBUserName, self.eNBPassword)
self.command('cd ' + self.eNBSourceCodePath, '\$', 5)
self.command('cd cmake_targets', '\$', 5)
self.command('rm -f enb.log.zip', '\$', 5)
self.command('zip enb.log.zip enb*.log', '\$', 60)
self.command('echo ' + self.eNBPassword + ' | sudo -S rm enb*.log', '\$', 5)
self.close()
def LogCollectPing(self):
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('cd scripts', '\$', 5)
self.command('rm -f ping.log.zip', '\$', 5)
self.command('zip ping.log.zip ping*.log', '\$', 60)
self.command('rm ping*.log', '\$', 5)
self.close()
def LogCollectIperf(self):
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('cd scripts', '\$', 5)
self.command('rm -f iperf.log.zip', '\$', 5)
self.command('zip iperf.log.zip iperf*.log', '\$', 60)
self.command('rm iperf*.log', '\$', 5)
self.close()
def LogCollectHSS(self):
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('cd scripts', '\$', 5)
self.command('rm -f hss.log.zip', '\$', 5)
if re.match('OAI', self.EPCType, re.IGNORECASE):
self.command('zip hss.log.zip hss*.log', '\$', 60)
self.command('rm hss*.log', '\$', 5)
if test.findtext('nodes'):
CiTestObj.nodes = test.findtext('nodes').split(' ')
if len(CiTestObj.ue_ids) != len(CiTestObj.nodes):
logging.error('Number of Nodes is not equal to the total number of UEs')
sys.exit("Mismatch in number of Nodes and UEs")
else:
CiTestObj.nodes = [None] * len(CiTestObj.ue_ids)
ping_rttavg_threshold = test.findtext('ping_rttavg_threshold') or ''
success = CiTestObj.Ping(HTML,EPC,CONTAINERS)
elif action == 'Iperf' or action == 'Iperf2_Unidir':
CiTestObj.iperf_args = test.findtext('iperf_args')
CiTestObj.ue_ids = test.findtext('id').split(' ')
CiTestObj.svr_id = test.findtext('svr_id') or None
if force_local:
# Change all execution targets to localhost
CiTestObj.nodes = ['localhost'] * len(CiTestObj.ue_ids)
else:
self.command('cp /opt/hss_sim0609/hss.log .', '\$', 60)
self.command('zip hss.log.zip hss.log', '\$', 60)
self.close()
def LogCollectMME(self):
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('cd scripts', '\$', 5)
self.command('rm -f mme.log.zip', '\$', 5)
if re.match('OAI', self.EPCType, re.IGNORECASE):
self.command('zip mme.log.zip mme*.log', '\$', 60)
self.command('rm mme*.log', '\$', 5)
if test.findtext('nodes'):
CiTestObj.nodes = test.findtext('nodes').split(' ')
if len(CiTestObj.ue_ids) != len(CiTestObj.nodes):
logging.error('Number of Nodes is not equal to the total number of UEs')
sys.exit("Mismatch in number of Nodes and UEs")
else:
CiTestObj.nodes = [None] * len(CiTestObj.ue_ids)
if test.findtext('svr_node'):
CiTestObj.svr_node = test.findtext('svr_node') if not force_local else 'localhost'
CiTestObj.iperf_packetloss_threshold = test.findtext('iperf_packetloss_threshold')
CiTestObj.iperf_bitrate_threshold = test.findtext('iperf_bitrate_threshold') or '90'
CiTestObj.iperf_profile = test.findtext('iperf_profile') or 'balanced'
CiTestObj.iperf_tcp_rate_target = test.findtext('iperf_tcp_rate_target') or None
if CiTestObj.iperf_profile != 'balanced' and CiTestObj.iperf_profile != 'unbalanced' and CiTestObj.iperf_profile != 'single-ue':
logging.error(f'test-case has wrong profile {CiTestObj.iperf_profile}, forcing balanced')
CiTestObj.iperf_profile = 'balanced'
CiTestObj.iperf_options = test.findtext('iperf_options') or 'check'
if CiTestObj.iperf_options != 'check' and CiTestObj.iperf_options != 'sink':
logging.error('test-case has wrong option ' + CiTestObj.iperf_options)
CiTestObj.iperf_options = 'check'
if action == 'Iperf':
success = CiTestObj.Iperf(HTML, EPC, CONTAINERS)
elif action == 'Iperf2_Unidir':
success = CiTestObj.Iperf2_Unidir(HTML, EPC, CONTAINERS)
elif action == 'IdleSleep':
st = test.findtext('idle_sleep_time_in_sec') or "5"
success = cls_oaicitest.IdleSleep(HTML, int(st))
elif action == 'Deploy_Run_PhySim':
success = PHYSIM.Deploy_PhySim(HTML)
elif action == 'Initialize_MME':
string_field = test.findtext('option')
if (string_field is not None):
EPC.mmeConfFile = string_field
success = EPC.InitializeMME(HTML)
elif action == 'Initialize_HSS' or action == 'Initialize_SPGW':
if action == 'Initialize_HSS':
success = EPC.InitializeHSS(HTML)
elif action == 'Initialize_SPGW':
success = EPC.InitializeSPGW(HTML)
elif action == 'Terminate_HSS' or action == 'Terminate_MME' or action == 'Terminate_SPGW':
if action == 'Terminate_HSS':
success = EPC.TerminateHSS(HTML)
elif action == 'Terminate_MME':
success = EPC.TerminateMME(HTML)
elif action == 'Terminate_SPGW':
success = EPC.TerminateSPGW(HTML)
elif action == 'Deploy_EPC':
string_field = test.findtext('parameters')
if (string_field is not None):
EPC.yamlPath = string_field
success = EPC.DeployEpc(HTML)
elif action == 'Undeploy_EPC':
success = EPC.UndeployEpc(HTML)
elif action == 'Initialize_5GCN':
string_field = test.findtext('args')
if (string_field is not None):
EPC.cfgDeploy = string_field
EPC.cnID = test.findtext('cn_id')
success = EPC.Initialize5GCN(HTML)
elif action == 'Terminate_5GCN':
string_field = test.findtext('args')
if (string_field is not None):
EPC.cfgUnDeploy = string_field
EPC.cnID = test.findtext('cn_id')
success = EPC.Terminate5GCN(HTML)
elif action == 'Deploy_Object' or action == 'Undeploy_Object' or action == "Create_Workspace":
eNB_instance=test.findtext('eNB_instance')
if (eNB_instance is None):
CONTAINERS.eNB_instance=0
else:
self.command('cp /opt/ltebox/var/log/*Log.0 .', '\$', 5)
self.command('zip mme.log.zip mmeLog.0 s1apcLog.0 s1apsLog.0 s11cLog.0 libLog.0 s1apCodecLog.0', '\$', 60)
self.close()
def LogCollectSPGW(self):
self.open(self.EPCIPAddress, self.EPCUserName, self.EPCPassword)
self.command('cd ' + self.EPCSourceCodePath, '\$', 5)
self.command('cd scripts', '\$', 5)
self.command('rm -f spgw.log.zip', '\$', 5)
if re.match('OAI', self.EPCType, re.IGNORECASE):
self.command('zip spgw.log.zip spgw*.log', '\$', 60)
self.command('rm spgw*.log', '\$', 5)
CONTAINERS.eNB_instance=int(eNB_instance)
eNB_serverId=test.findtext('eNB_serverId')
if (eNB_serverId is None):
CONTAINERS.eNB_serverId[CONTAINERS.eNB_instance]='0'
else:
self.command('cp /opt/ltebox/var/log/xGwLog.0 .', '\$', 5)
self.command('zip spgw.log.zip xGwLog.0', '\$', 60)
self.close()
#-----------------------------------------------------------
# HTML Reporting....
#-----------------------------------------------------------
def CreateHtmlHeader(self):
if (not self.htmlHeaderCreated):
self.htmlFile = open('test_results.html', 'w')
self.htmlFile.write('<!DOCTYPE html>\n')
self.htmlFile.write('<html class="no-js" lang="en-US">\n')
self.htmlFile.write('<head>\n')
self.htmlFile.write(' <title>Test Results for TEMPLATE_JOB_NAME job build #TEMPLATE_BUILD_ID</title>\n')
self.htmlFile.write(' <base href = "http://www.openairinterface.org/" />\n')
self.htmlFile.write('</head>\n')
self.htmlFile.write('<body>\n')
self.htmlFile.write(' <table style="border-collapse: collapse; border: none;">\n')
self.htmlFile.write(' <tr style="border-collapse: collapse; border: none;">\n')
self.htmlFile.write(' <td style="border-collapse: collapse; border: none;">\n')
self.htmlFile.write(' <a href="http://www.openairinterface.org/">\n')
self.htmlFile.write(' <img src="/wp-content/uploads/2016/03/cropped-oai_final_logo2.png" alt="" border="none" height=50 width=150>\n')
self.htmlFile.write(' </img>\n')
self.htmlFile.write(' </a>\n')
self.htmlFile.write(' </td>\n')
self.htmlFile.write(' <td style="border-collapse: collapse; border: none; vertical-align: center;">\n')
self.htmlFile.write(' <b><font size = "6">Job Summary -- Job: TEMPLATE_JOB_NAME -- Build-ID: TEMPLATE_BUILD_ID</font></b>\n')
self.htmlFile.write(' </td>\n')
self.htmlFile.write(' </tr>\n')
self.htmlFile.write(' </table>\n')
self.htmlFile.write(' <br>\n')
self.htmlFile.write(' <table border = "1">\n')
self.htmlFile.write(' <tr>\n')
self.htmlFile.write(' <td bgcolor = "lightcyan" >GIT Repository</td>\n')
self.htmlFile.write(' <td><a href="' + SSH.eNBRepository + '">' + SSH.eNBRepository + '</a></td>\n')
self.htmlFile.write(' </tr>\n')
self.htmlFile.write(' <tr>\n')
self.htmlFile.write(' <td bgcolor = "lightcyan" >Job Trigger</td>\n')
if (SSH.eNB_AllowMerge):
self.htmlFile.write(' <td>Merge-Request</td>\n')
else:
self.htmlFile.write(' <td>Push to Branch</td>\n')
self.htmlFile.write(' </tr>\n')
self.htmlFile.write(' <tr>\n')
if (SSH.eNB_AllowMerge):
self.htmlFile.write(' <td bgcolor = "lightcyan" >Source Branch</td>\n')
else:
self.htmlFile.write(' <td bgcolor = "lightcyan" >Branch</td>\n')
self.htmlFile.write(' <td>' + SSH.eNBBranch + '</td>\n')
self.htmlFile.write(' </tr>\n')
self.htmlFile.write(' <tr>\n')
if (SSH.eNB_AllowMerge):
self.htmlFile.write(' <td bgcolor = "lightcyan" >Source Commit ID</td>\n')
else:
self.htmlFile.write(' <td bgcolor = "lightcyan" >Commit ID</td>\n')
self.htmlFile.write(' <td>' + SSH.eNBCommitID + '</td>\n')
self.htmlFile.write(' </tr>\n')
if (SSH.eNB_AllowMerge):
self.htmlFile.write(' <tr>\n')
self.htmlFile.write(' <td bgcolor = "lightcyan" >Target Branch</td>\n')
self.htmlFile.write(' <td>develop</td>\n')
self.htmlFile.write(' </tr>\n')
self.htmlFile.write(' </table>\n')
terminate_ue_flag = True
SSH.GetAllUEDevices(terminate_ue_flag)
self.htmlUEConnected = len(self.UEDevices)
self.htmlFile.write('<h2>' + str(self.htmlUEConnected) + ' UE(s) is(are) connected to ADB bench server</h2>\n')
self.htmlFile.write(' <br>\n')
self.htmlFile.write(' <h2>Test Summary for ' + SSH.testXMLfile + '</h2>\n')
self.htmlFile.write(' <table border = "1">\n')
self.htmlFile.write(' <tr bgcolor = "#33CCFF" >\n')
self.htmlFile.write(' <th>Test Id</th>\n')
self.htmlFile.write(' <th>Test Desc</th>\n')
self.htmlFile.write(' <th>Test Options</th>\n')
self.htmlFile.write(' <th>Test Status</th>\n')
i = 0
while (i < self.htmlUEConnected):
self.htmlFile.write(' <th>UE' + str(i) + ' Status</th>\n')
i += 1
self.htmlFile.write(' </tr>\n')
self.htmlHeaderCreated = True
def CreateHtmlFooter(self):
if ((not self.htmlFooterCreated) and (self.htmlHeaderCreated)):
self.htmlFile.write(' </table>\n')
self.htmlFile.write('</body>\n')
self.htmlFile.write('</html>\n')
self.htmlFile.close()
self.htmlFooterCreated = False
def CreateHtmlTestRow(self, options, status, processesStatus):
if ((not self.htmlFooterCreated) and (self.htmlHeaderCreated)):
self.htmlFile.write(' <tr>\n')
self.htmlFile.write(' <td bgcolor = "lightcyan" >' + SSH.testCase_id + '</td>\n')
self.htmlFile.write(' <td>' + SSH.desc + '</td>\n')
self.htmlFile.write(' <td>' + str(options) + '</td>\n')
if (str(status) == 'OK'):
self.htmlFile.write(' <td bgcolor = "lightgreen" >' + str(status) + '</td>\n')
elif (str(status) == 'KO'):
if (processesStatus == 0):
self.htmlFile.write(' <td bgcolor = "lightcoral" >' + str(status) + '</td>\n')
elif (processesStatus == -1):
self.htmlFile.write(' <td bgcolor = "lightcoral" >KO - eNB process not found</td>\n')
elif (processesStatus == -2):
self.htmlFile.write(' <td bgcolor = "lightcoral" >KO - HSS process not found</td>\n')
elif (processesStatus == -3):
self.htmlFile.write(' <td bgcolor = "lightcoral" >KO - MME process not found</td>\n')
elif (processesStatus == -4):
self.htmlFile.write(' <td bgcolor = "lightcoral" >KO - SPGW process not found</td>\n')
else:
self.htmlFile.write(' <td bgcolor = "lightcoral" >' + str(status) + '</td>\n')
else:
self.htmlFile.write(' <td bgcolor = "orange" >' + str(status) + '</td>\n')
i = 0
while (i < self.htmlUEConnected):
self.htmlFile.write(' <td>-</td>\n')
i += 1
self.htmlFile.write(' </tr>\n')
def CreateHtmlTestRowQueue(self, options, status, ue_status, ue_queue):
if ((not self.htmlFooterCreated) and (self.htmlHeaderCreated)):
addOrangeBK = False
self.htmlFile.write(' <tr>\n')
self.htmlFile.write(' <td bgcolor = "lightcyan" >' + SSH.testCase_id + '</td>\n')
self.htmlFile.write(' <td>' + SSH.desc + '</td>\n')
self.htmlFile.write(' <td>' + str(options) + '</td>\n')
if (str(status) == 'OK'):
self.htmlFile.write(' <td bgcolor = "lightgreen" >' + str(status) + '</td>\n')
elif (str(status) == 'KO'):
self.htmlFile.write(' <td bgcolor = "lightcoral" >' + str(status) + '</td>\n')
else:
addOrangeBK = True
self.htmlFile.write(' <td bgcolor = "orange" >' + str(status) + '</td>\n')
i = 0
while (i < self.htmlUEConnected):
if (i < ue_status):
if (not ue_queue.empty()):
if (addOrangeBK):
self.htmlFile.write(' <td bgcolor = "orange" >' + str(ue_queue.get()) + '</td>\n')
else:
self.htmlFile.write(' <td>' + str(ue_queue.get()) + '</td>\n')
else:
self.htmlFile.write(' <td>-</td>\n')
else:
self.htmlFile.write(' <td>-</td>\n')
i += 1
self.htmlFile.write(' </tr>\n')
CONTAINERS.eNB_serverId[CONTAINERS.eNB_instance]=eNB_serverId
string_field = test.findtext('yaml_path')
if (string_field is not None):
CONTAINERS.yamlPath[CONTAINERS.eNB_instance] = string_field
string_field=test.findtext('d_retx_th')
if (string_field is not None):
CONTAINERS.ran_checkers['d_retx_th'] = [float(x) for x in string_field.split(',')]
string_field=test.findtext('u_retx_th')
if (string_field is not None):
CONTAINERS.ran_checkers['u_retx_th'] = [float(x) for x in string_field.split(',')]
string_field = test.findtext('services')
if string_field is not None:
CONTAINERS.services[CONTAINERS.eNB_instance] = string_field
CONTAINERS.num_attempts = int(test.findtext('num_attempts') or 1)
CONTAINERS.deploymentTag = cls_containerize.CreateTag(CONTAINERS.ranCommitID, CONTAINERS.ranBranch, CONTAINERS.ranAllowMerge)
if action == 'Deploy_Object':
success = CONTAINERS.DeployObject(HTML)
elif action == 'Undeploy_Object':
success = CONTAINERS.UndeployObject(HTML, RAN)
elif action == 'Create_Workspace':
if force_local:
# Do not create a working directory when running locally. Current repo directory will be used
return True
success = CONTAINERS.Create_Workspace(HTML)
elif action == 'Run_Physim':
physim_options = test.findtext('physim_run_args')
physim_test = test.findtext('physim_test')
physim_threshold = test.findtext('physim_time_threshold') or 'inf'
success = cls_native.Native.Run_Physim(HTML, RAN.eNBIPAddress, RAN.eNBSourceCodePath, physim_options, physim_test, physim_threshold)
elif action == 'LicenceAndFormattingCheck':
success = SCA.LicenceAndFormattingCheck(HTML)
elif action == 'Cppcheck_Analysis':
success = SCA.CppCheckAnalysis(HTML)
elif action == 'Push_Local_Registry':
svr_id = test.findtext('svr_id')
success = CONTAINERS.Push_Image_to_Local_Registry(HTML, svr_id)
elif action == 'Pull_Local_Registry' or action == 'Clean_Test_Server_Images':
if force_local:
# Do not pull or remove images when running locally. User is supposed to handle image creation & cleanup
return True
svr_id = test.findtext('svr_id')
images = test.findtext('images').split()
# hack: for FlexRIC, we need to overwrite the tag to use
tag = None
if len(images) == 1 and images[0] == "oai-flexric":
tag = CONTAINERS.flexricTag
if action == "Pull_Local_Registry":
success = CONTAINERS.Pull_Image_from_Registry(HTML, svr_id, images, tag=tag)
if action == "Clean_Test_Server_Images":
success = CONTAINERS.Clean_Test_Server_Images(HTML, svr_id, images, tag=tag)
elif action == 'Custom_Command':
node = test.findtext('node')
if force_local:
# Change all execution targets to localhost
node = 'localhost'
command = test.findtext('command')
command_fail = test.findtext('command_fail') in ['True', 'true', 'Yes', 'yes']
success = cls_oaicitest.Custom_Command(HTML, node, command, command_fail)
elif action == 'Custom_Script':
node = test.findtext('node')
script = test.findtext('script')
command_fail = test.findtext('command_fail') in ['True', 'true', 'Yes', 'yes']
success = cls_oaicitest.Custom_Script(HTML, node, script, command_fail)
elif action == 'Pull_Cluster_Image':
images = test.findtext('images').split()
node = test.findtext('node')
success = CLUSTER.PullClusterImage(HTML, node, images)
#-----------------------------------------------------------
# Usage()
#-----------------------------------------------------------
def Usage():
print('------------------------------------------------------------')
print('main.py Ver:' + Version)
print('------------------------------------------------------------')
print('Usage: python main.py [options]')
print(' --help Show this help.')
print(' --mode=[Mode]')
print(' TesteNB')
print(' TerminateeNB, TerminateUE, TerminateHSS, TerminateMME, TerminateSPGW')
print(' LogCollectBuild, LogCollecteNB, LogCollectHSS, LogCollectMME, LogCollectSPGW, LogCollectPing, LogCollectIperf')
print(' --eNBIPAddress=[eNB\'s IP Address]')
print(' --eNBRepository=[eNB\'s Repository URL]')
print(' --eNBBranch=[eNB\'s Branch Name]')
print(' --eNBCommitID=[eNB\'s Commit Number]')
print(' --eNBUserName=[eNB\'s Login User Name]')
print(' --eNBPassword=[eNB\'s Login Password]')
print(' --eNBSourceCodePath=[eNB\'s Source Code Path]')
print(' --EPCIPAddress=[EPC\'s IP Address]')
print(' --EPCUserName=[EPC\'s Login User Name]')
print(' --EPCPassword=[EPC\'s Login Password]')
print(' --EPCSourceCodePath=[EPC\'s Source Code Path]')
print(' --EPCType=[EPC\'s Type: OAI or ltebox]')
print(' --ADBIPAddress=[ADB\'s IP Address]')
print(' --ADBUserName=[ADB\'s Login User Name]')
print(' --ADBPassword=[ADB\'s Login Password]')
print(' --XMLTestFile=[XML Test File to be run]')
print('------------------------------------------------------------')
else:
logging.warning(f"unknown action {action}, skip step")
success = True # by default, we skip the step and print a warning
#-----------------------------------------------------------
# ShowTestID()
#-----------------------------------------------------------
def ShowTestID():
logging.debug('\u001B[1m----------------------------------------\u001B[0m')
logging.debug('\u001B[1mTest ID:' + SSH.testCase_id + '\u001B[0m')
logging.debug('\u001B[1m' + SSH.desc + '\u001B[0m')
logging.debug('\u001B[1m----------------------------------------\u001B[0m')
def CheckClassValidity(action,id):
if action != 'Build_eNB' and action != 'Initialize_eNB' and action != 'Terminate_eNB' and action != 'Initialize_UE' and action != 'Terminate_UE' and action != 'Attach_UE' and action != 'Detach_UE' and action != 'Ping' and action != 'Iperf' and action != 'Reboot_UE' and action != 'Initialize_HSS' and action != 'Terminate_HSS' and action != 'Initialize_MME' and action != 'Terminate_MME' and action != 'Initialize_SPGW' and action != 'Terminate_SPGW':
logging.debug('ERROR: test-case ' + id + ' has wrong class ' + action)
return False
return True
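# Illustrative sketch (not part of the CI scripts): the chained comparison above is
# equivalent to a simple set-membership test; the newer code path loads the allowed
# classes from xml_class_list.yml instead of hard-coding them.
import logging
VALID_ACTIONS = {
    'Build_eNB', 'Initialize_eNB', 'Terminate_eNB',
    'Initialize_UE', 'Terminate_UE', 'Attach_UE', 'Detach_UE',
    'Ping', 'Iperf', 'Reboot_UE',
    'Initialize_HSS', 'Terminate_HSS',
    'Initialize_MME', 'Terminate_MME',
    'Initialize_SPGW', 'Terminate_SPGW',
}
def check_class_validity(action, test_id):
    if action not in VALID_ACTIONS:
        logging.debug('ERROR: test-case ' + test_id + ' has wrong class ' + action)
        return False
    return True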
def GetParametersFromXML(action):
if action == 'Build_eNB':
SSH.Build_eNB_args = test.findtext('Build_eNB_args')
if action == 'Initialize_eNB':
SSH.Initialize_eNB_args = test.findtext('Initialize_eNB_args')
if action == 'Ping':
SSH.ping_args = test.findtext('ping_args')
SSH.ping_packetloss_threshold = test.findtext('ping_packetloss_threshold')
if action == 'Iperf':
SSH.iperf_args = test.findtext('iperf_args')
SSH.iperf_packetloss_threshold = test.findtext('iperf_packetloss_threshold')
SSH.iperf_profile = test.findtext('iperf_profile')
if (SSH.iperf_profile is None):
SSH.iperf_profile = 'balanced'
else:
if SSH.iperf_profile != 'balanced' and SSH.iperf_profile != 'unbalanced' and SSH.iperf_profile != 'single-ue':
logging.debug('ERROR: test-case has wrong profile ' + SSH.iperf_profile)
SSH.iperf_profile = 'balanced'
return success
#check if given test is in list
#it is in list if one of the strings in 'list' is at the beginning of 'test'
@@ -1640,166 +442,203 @@ def test_in_list(test, list):
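# Illustrative sketch (assumption: the prefix semantics described in the comment above;
# the real test_in_list lives in the hunk referenced here): a test id such as '030201'
# is considered "in the list" if the list holds the id itself or a shorter '+'-terminated
# group prefix such as '0302+'.
def test_in_list_sketch(test, prefix_list):
    for entry in prefix_list:
        if test.startswith(entry.rstrip('+')):
            return True
    return False
# test_in_list_sketch('030201', ['0302+'])  -> True
# test_in_list_sketch('030201', ['040000']) -> False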
def receive_signal(signum, frame):
sys.exit(1)
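# Note (descriptive, not part of the CI scripts): the per-UE worker processes call
# os.kill(os.getppid(), signal.SIGUSR1) when they hit an exception; registering this
# handler with signal.signal(signal.SIGUSR1, receive_signal) in the parent turns any
# child failure into an immediate exit of the whole run.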
#-----------------------------------------------------------
# Parameter Check
# MAIN PART
#-----------------------------------------------------------
#loading xml action list from yaml
import yaml
xml_class_list_file='xml_class_list.yml'
if (os.path.isfile(xml_class_list_file)):
yaml_file=xml_class_list_file
elif (os.path.isfile('ci-scripts/'+xml_class_list_file)):
yaml_file='ci-scripts/'+xml_class_list_file
else:
logging.error("XML action list yaml file cannot be found")
sys.exit("XML action list yaml file cannot be found")
with open(yaml_file,'r') as f:
# The FullLoader parameter handles the conversion
# from YAML scalar values to Python dictionary format
xml_class_list = yaml.load(f,Loader=yaml.FullLoader)
mode = ''
SSH = SSHConnection()
argvs = sys.argv
argc = len(argvs)
CiTestObj = cls_oaicitest.OaiCiTest()
EPC = epc.EPCManagement()
RAN = ran.RANManagement()
HTML = cls_oai_html.HTMLManagement()
CONTAINERS = cls_containerize.Containerize()
SCA = cls_static_code_analysis.StaticCodeAnalysis()
PHYSIM = cls_physim1.PhySim()
CLUSTER = cls_cluster.Cluster()
while len(argvs) > 1:
myArgv = argvs.pop(1) # 0th is this file's name
if re.match('^\-\-help$', myArgv, re.IGNORECASE):
Usage()
sys.exit(0)
elif re.match('^\-\-mode=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-mode=(.+)$', myArgv, re.IGNORECASE)
mode = matchReg.group(1)
elif re.match('^\-\-eNBIPAddress=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-eNBIPAddress=(.+)$', myArgv, re.IGNORECASE)
SSH.eNBIPAddress = matchReg.group(1)
elif re.match('^\-\-eNBRepository=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-eNBRepository=(.+)$', myArgv, re.IGNORECASE)
SSH.eNBRepository = matchReg.group(1)
elif re.match('^\-\-eNB_AllowMerge=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-eNB_AllowMerge=(.+)$', myArgv, re.IGNORECASE)
doMerge = matchReg.group(1)
if ((doMerge == 'true') or (doMerge == 'True')):
SSH.eNB_AllowMerge = True
elif re.match('^\-\-eNBBranch=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-eNBBranch=(.+)$', myArgv, re.IGNORECASE)
SSH.eNBBranch = matchReg.group(1)
elif re.match('^\-\-eNBCommitID=(.*)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-eNBCommitID=(.*)$', myArgv, re.IGNORECASE)
SSH.eNBCommitID = matchReg.group(1)
elif re.match('^\-\-eNBUserName=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-eNBUserName=(.+)$', myArgv, re.IGNORECASE)
SSH.eNBUserName = matchReg.group(1)
elif re.match('^\-\-eNBPassword=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-eNBPassword=(.+)$', myArgv, re.IGNORECASE)
SSH.eNBPassword = matchReg.group(1)
elif re.match('^\-\-eNBSourceCodePath=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-eNBSourceCodePath=(.+)$', myArgv, re.IGNORECASE)
SSH.eNBSourceCodePath = matchReg.group(1)
elif re.match('^\-\-EPCIPAddress=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-EPCIPAddress=(.+)$', myArgv, re.IGNORECASE)
SSH.EPCIPAddress = matchReg.group(1)
elif re.match('^\-\-EPCBranch=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-EPCBranch=(.+)$', myArgv, re.IGNORECASE)
SSH.EPCBranch = matchReg.group(1)
elif re.match('^\-\-EPCUserName=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-EPCUserName=(.+)$', myArgv, re.IGNORECASE)
SSH.EPCUserName = matchReg.group(1)
elif re.match('^\-\-EPCPassword=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-EPCPassword=(.+)$', myArgv, re.IGNORECASE)
SSH.EPCPassword = matchReg.group(1)
elif re.match('^\-\-EPCSourceCodePath=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-EPCSourceCodePath=(.+)$', myArgv, re.IGNORECASE)
SSH.EPCSourceCodePath = matchReg.group(1)
elif re.match('^\-\-EPCType=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-EPCType=(.+)$', myArgv, re.IGNORECASE)
if re.match('OAI', matchReg.group(1), re.IGNORECASE) or re.match('ltebox', matchReg.group(1), re.IGNORECASE):
SSH.EPCType = matchReg.group(1)
else:
sys.exit('Invalid EPC Type: ' + matchReg.group(1) + ' -- (should be OAI or ltebox)')
elif re.match('^\-\-ADBIPAddress=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-ADBIPAddress=(.+)$', myArgv, re.IGNORECASE)
SSH.ADBIPAddress = matchReg.group(1)
elif re.match('^\-\-ADBUserName=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-ADBUserName=(.+)$', myArgv, re.IGNORECASE)
SSH.ADBUserName = matchReg.group(1)
elif re.match('^\-\-ADBPassword=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-ADBPassword=(.+)$', myArgv, re.IGNORECASE)
SSH.ADBPassword = matchReg.group(1)
elif re.match('^\-\-XMLTestFile=(.+)$', myArgv, re.IGNORECASE):
matchReg = re.match('^\-\-XMLTestFile=(.+)$', myArgv, re.IGNORECASE)
SSH.testXMLfile = matchReg.group(1)
else:
Usage()
sys.exit('Invalid Parameter: ' + myArgv)
#-----------------------------------------------------------
# Parsing Command Line Arguments
#-----------------------------------------------------------
import args_parse
# Force local execution, move all execution targets to localhost
force_local = False
py_param_file_present, py_params, mode, force_local = args_parse.ArgsParse(sys.argv,CiTestObj,RAN,HTML,EPC,CONTAINERS,HELP,SCA,PHYSIM,CLUSTER)
#-----------------------------------------------------------
# TEMPORARY params management (UNUSED)
#-----------------------------------------------------------
#temporary solution for testing:
if py_param_file_present == True:
AssignParams(py_params)
#-----------------------------------------------------------
# mode and XML class (action) analysis
#-----------------------------------------------------------
cwd = os.getcwd()
if re.match('^TerminateeNB$', mode, re.IGNORECASE):
if SSH.eNBIPAddress == '' or SSH.eNBUserName == '' or SSH.eNBPassword == '':
Usage()
if RAN.eNBIPAddress == '' or RAN.eNBUserName == '' or RAN.eNBPassword == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.TerminateeNB()
elif re.match('^TerminateUE$', mode, re.IGNORECASE):
if SSH.ADBIPAddress == '' or SSH.ADBUserName == '' or SSH.ADBPassword == '':
Usage()
sys.exit('Insufficient Parameter')
signal.signal(signal.SIGUSR1, receive_signal)
SSH.TerminateUE()
if RAN.eNBIPAddress == 'none':
sys.exit(0)
RAN.eNB_instance=0
RAN.eNB_serverId[0]='0'
RAN.eNBSourceCodePath='/tmp/'
RAN.TerminateeNB(HTML, EPC)
elif re.match('^TerminateHSS$', mode, re.IGNORECASE):
if SSH.EPCIPAddress == '' or SSH.EPCUserName == '' or SSH.EPCPassword == '' or SSH.EPCType == '' or SSH.EPCSourceCodePath == '':
Usage()
if EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.Type == '' or EPC.SourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.TerminateHSS()
EPC.TerminateHSS(HTML)
elif re.match('^TerminateMME$', mode, re.IGNORECASE):
if SSH.EPCIPAddress == '' or SSH.EPCUserName == '' or SSH.EPCPassword == '' or SSH.EPCType == '' or SSH.EPCSourceCodePath == '':
Usage()
if EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.Type == '' or EPC.SourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.TerminateMME()
EPC.TerminateMME(HTML)
elif re.match('^TerminateSPGW$', mode, re.IGNORECASE):
if SSH.EPCIPAddress == '' or SSH.EPCUserName == '' or SSH.EPCPassword == '' or SSH.EPCType == '' or SSH.EPCSourceCodePath == '':
Usage()
if EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.Type == '' or EPC.SourceCodePath== '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.TerminateSPGW()
EPC.TerminateSPGW(HTML)
elif re.match('^LogCollectBuild$', mode, re.IGNORECASE):
if SSH.eNBIPAddress == '' or SSH.eNBUserName == '' or SSH.eNBPassword == '' or SSH.eNBSourceCodePath == '':
Usage()
if (RAN.eNBIPAddress == '' or RAN.eNBUserName == '' or RAN.eNBPassword == '' or RAN.eNBSourceCodePath == '') and (CiTestObj.UEIPAddress == '' or CiTestObj.UEUserName == '' or CiTestObj.UEPassword == '' or CiTestObj.UESourceCodePath == ''):
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.LogCollectBuild()
if RAN.eNBIPAddress == 'none':
sys.exit(0)
CiTestObj.LogCollectBuild(RAN)
elif re.match('^LogCollecteNB$', mode, re.IGNORECASE):
if SSH.eNBIPAddress == '' or SSH.eNBUserName == '' or SSH.eNBPassword == '' or SSH.eNBSourceCodePath == '':
Usage()
if RAN.eNBIPAddress == '' or RAN.eNBUserName == '' or RAN.eNBPassword == '' or RAN.eNBSourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.LogCollecteNB()
if os.path.isdir('cmake_targets/log'):
cmd = 'zip -r enb.log.' + RAN.BuildId + '.zip cmake_targets/log'
logging.info(cmd)
try:
zipStatus = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True, timeout=60)
except subprocess.CalledProcessError as e:
logging.error("Command '{}' returned non-zero exit status {}.".format(e.cmd, e.returncode))
logging.error("Error output:\n{}".format(e.output))
sys.exit(0)
RAN.LogCollecteNB()
elif re.match('^LogCollectHSS$', mode, re.IGNORECASE):
if SSH.EPCIPAddress == '' or SSH.EPCUserName == '' or SSH.EPCPassword == '' or SSH.EPCType == '' or SSH.EPCSourceCodePath == '':
Usage()
if EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.Type == '' or EPC.SourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.LogCollectHSS()
EPC.LogCollectHSS()
elif re.match('^LogCollectMME$', mode, re.IGNORECASE):
if SSH.EPCIPAddress == '' or SSH.EPCUserName == '' or SSH.EPCPassword == '' or SSH.EPCType == '' or SSH.EPCSourceCodePath == '':
Usage()
if EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.Type == '' or EPC.SourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.LogCollectMME()
EPC.LogCollectMME()
elif re.match('^LogCollectSPGW$', mode, re.IGNORECASE):
if SSH.EPCIPAddress == '' or SSH.EPCUserName == '' or SSH.EPCPassword == '' or SSH.EPCType == '' or SSH.EPCSourceCodePath == '':
Usage()
if EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.Type == '' or EPC.SourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.LogCollectSPGW()
EPC.LogCollectSPGW()
elif re.match('^LogCollectPing$', mode, re.IGNORECASE):
if SSH.EPCIPAddress == '' or SSH.EPCUserName == '' or SSH.EPCPassword == '' or SSH.EPCSourceCodePath == '':
Usage()
if EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.SourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.LogCollectPing()
CiTestObj.LogCollectPing(EPC)
elif re.match('^LogCollectIperf$', mode, re.IGNORECASE):
if SSH.EPCIPAddress == '' or SSH.EPCUserName == '' or SSH.EPCPassword == '' or SSH.EPCSourceCodePath == '':
Usage()
if EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.SourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.LogCollectIperf()
elif re.match('^TesteNB$', mode, re.IGNORECASE):
if SSH.eNBIPAddress == '' or SSH.eNBRepository == '' or SSH.eNBBranch == '' or SSH.eNBUserName == '' or SSH.eNBPassword == '' or SSH.eNBSourceCodePath == '' or SSH.EPCIPAddress == '' or SSH.EPCUserName == '' or SSH.EPCPassword == '' or SSH.EPCType == '' or SSH.EPCSourceCodePath == '' or SSH.ADBIPAddress == '' or SSH.ADBUserName == '' or SSH.ADBPassword == '':
Usage()
CiTestObj.LogCollectIperf(EPC)
elif re.match('^LogCollectOAIUE$', mode, re.IGNORECASE):
if CiTestObj.UEIPAddress == '' or CiTestObj.UEUserName == '' or CiTestObj.UEPassword == '' or CiTestObj.UESourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
SSH.CreateHtmlHeader()
CiTestObj.LogCollectOAIUE()
elif re.match('^InitiateHtml$', mode, re.IGNORECASE):
count = 0
foundCount = 0
while (count < HTML.nbTestXMLfiles):
#xml_test_file = cwd + "/" + CiTestObj.testXMLfiles[count]
xml_test_file = sys.path[0] + "/" + CiTestObj.testXMLfiles[count]
if (os.path.isfile(xml_test_file)):
try:
xmlTree = ET.parse(xml_test_file)
except Exception as e:
print(f"Error: {e} while parsing file: {xml_test_file}.")
xmlRoot = xmlTree.getroot()
HTML.htmlTabRefs.append(xmlRoot.findtext('htmlTabRef',default='test-tab-' + str(count)))
HTML.htmlTabNames.append(xmlRoot.findtext('htmlTabName',default='test-tab-' + str(count)))
HTML.htmlTabIcons.append(xmlRoot.findtext('htmlTabIcon',default='info-sign'))
foundCount += 1
count += 1
if foundCount != HTML.nbTestXMLfiles:
HTML.nbTestXMLfiles=foundCount
HTML.CreateHtmlHeader()
elif re.match('^FinalizeHtml$', mode, re.IGNORECASE):
logging.info('\u001B[1m----------------------------------------\u001B[0m')
logging.info('\u001B[1m Creating HTML footer \u001B[0m')
logging.info('\u001B[1m----------------------------------------\u001B[0m')
HTML.CreateHtmlFooter(CiTestObj.finalStatus)
elif re.match('^TesteNB$', mode, re.IGNORECASE) or re.match('^TestUE$', mode, re.IGNORECASE):
logging.info('\u001B[1m----------------------------------------\u001B[0m')
logging.info('\u001B[1m Starting Scenario: ' + CiTestObj.testXMLfiles[0] + '\u001B[0m')
logging.info('\u001B[1m----------------------------------------\u001B[0m')
if re.match('^TesteNB$', mode, re.IGNORECASE):
if RAN.eNBIPAddress == '' or RAN.ranRepository == '' or RAN.ranBranch == '' or RAN.eNBUserName == '' or RAN.eNBPassword == '' or RAN.eNBSourceCodePath == '' or EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.Type == '' or EPC.SourceCodePath == '':
HELP.GenericHelp(CONST.Version)
if EPC.IPAddress == '' or EPC.UserName == '' or EPC.Password == '' or EPC.SourceCodePath == '' or EPC.Type == '':
HELP.EPCSrvHelp(EPC.IPAddress, EPC.UserName, EPC.Password, EPC.SourceCodePath, EPC.Type)
if RAN.ranRepository == '':
HELP.GitSrvHelp(RAN.ranRepository, RAN.ranBranch, RAN.ranCommitID, RAN.ranAllowMerge, RAN.ranTargetBranch)
if RAN.eNBIPAddress == '' or RAN.eNBUserName == '' or RAN.eNBPassword == '' or RAN.eNBSourceCodePath == '':
HELP.eNBSrvHelp(RAN.eNBIPAddress, RAN.eNBUserName, RAN.eNBPassword, RAN.eNBSourceCodePath)
sys.exit('Insufficient Parameter')
else:
if CiTestObj.UEIPAddress == '' or CiTestObj.ranRepository == '' or CiTestObj.ranBranch == '' or CiTestObj.UEUserName == '' or CiTestObj.UEPassword == '' or CiTestObj.UESourceCodePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('UE: Insufficient Parameter')
#read test_case_list.xml file
# if no parameters for XML file, use default value
if SSH.testXMLfile == '':
xml_test_file = sys.path[0] + "/test_case_list.xml"
# if no parameters for XML file, use default value
if (HTML.nbTestXMLfiles != 1):
xml_test_file = cwd + "/test_case_list.xml"
else:
xml_test_file = sys.path[0] + "/" + SSH.testXMLfile
xml_test_file = cwd + "/" + CiTestObj.testXMLfiles[0]
xmlTree = ET.parse(xml_test_file)
xmlRoot = xmlTree.getroot()
exclusion_tests=xmlRoot.findtext('TestCaseExclusionList',default='')
requested_tests=xmlRoot.findtext('TestCaseRequestedList',default='')
if (HTML.nbTestXMLfiles == 1):
HTML.htmlTabRefs.append(xmlRoot.findtext('htmlTabRef',default='test-tab-0'))
HTML.htmlTabNames.append(xmlRoot.findtext('htmlTabName',default='Test-0'))
all_tests=xmlRoot.findall('testCase')
exclusion_tests=exclusion_tests.split()
@@ -1809,86 +648,84 @@ elif re.match('^TesteNB$', mode, re.IGNORECASE):
#(6 digits or less than 6 digits followed by +)
for test in exclusion_tests:
if (not re.match('^[0-9]{6}$', test) and
not re.match('^[0-9]{1,5}\+$', test)):
logging.debug('ERROR: exclusion test is invalidly formatted: ' + test)
not re.match('^[0-9]{1,5}\\+$', test)):
logging.error('exclusion test is invalidly formatted: ' + test)
sys.exit(1)
else:
logging.debug(test)
logging.info(test)
#check that requested tests are well formatted
#(6 digits or less than 6 digits followed by +)
#be verbose
for test in requested_tests:
if (re.match('^[0-9]{6}$', test) or
re.match('^[0-9]{1,5}\+$', test)):
logging.debug('INFO: test group/case requested: ' + test)
re.match('^[0-9]{1,5}\\+$', test)):
logging.info('test group/case requested: ' + test)
else:
logging.debug('ERROR: requested test is invalidly formatted: ' + test)
logging.error('requested test is invalidly formatted: ' + test)
sys.exit(1)
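# Illustrative sketch (not part of the CI scripts): the two patterns above accept either
# a full 6-digit test id or a shorter group prefix terminated by '+'.
import re
def is_valid_test_token(token):
    return bool(re.match(r'^[0-9]{6}$', token) or re.match(r'^[0-9]{1,5}\+$', token))
# is_valid_test_token('030201') -> True   (single test case)
# is_valid_test_token('0302+')  -> True   (whole 0302xx group)
# is_valid_test_token('30201')  -> False  (neither 6 digits nor a '+' prefix)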
if (EPC.IPAddress != '') and (EPC.IPAddress != 'none'):
EPC.SetMmeIPAddress()
EPC.SetAmfIPAddress()
#get the list of tests to be done
todo_tests=[]
for test in requested_tests:
if (test_in_list(test, exclusion_tests)):
logging.debug('INFO: test will be skipped: ' + test)
logging.info('test will be skipped: ' + test)
else:
#logging.debug('INFO: test will be run: ' + test)
#logging.info('test will be run: ' + test)
todo_tests.append(test)
signal.signal(signal.SIGUSR1, receive_signal)
HTML.CreateHtmlTabHeader()
task_set_succeeded = True
HTML.startTime=int(round(time.time() * 1000))
for test_case_id in todo_tests:
for test in all_tests:
id = test.get('id')
if test_case_id != id:
continue
SSH.testCase_id = id
SSH.desc = test.findtext('desc')
CiTestObj.testCase_id = id
HTML.testCase_id=CiTestObj.testCase_id
EPC.testCase_id=CiTestObj.testCase_id
CiTestObj.desc = test.findtext('desc')
always_exec = test.findtext('always_exec') in ['True', 'true', 'Yes', 'yes']
HTML.desc=CiTestObj.desc
action = test.findtext('class')
if (CheckClassValidity(action, id) == False):
if (CheckClassValidity(xml_class_list, action, id) == False):
continue
ShowTestID()
GetParametersFromXML(action)
if action == 'Initialize_UE' or action == 'Attach_UE' or action == 'Detach_UE' or action == 'Ping' or action == 'Iperf' or action == 'Reboot_UE':
terminate_ue_flag = False
SSH.GetAllUEDevices(terminate_ue_flag)
if action == 'Build_eNB':
SSH.BuildeNB()
elif action == 'Initialize_eNB':
SSH.InitializeeNB()
elif action == 'Terminate_eNB':
SSH.TerminateeNB()
elif action == 'Initialize_UE':
SSH.InitializeUE()
elif action == 'Terminate_UE':
SSH.TerminateUE()
elif action == 'Attach_UE':
SSH.AttachUE()
elif action == 'Detach_UE':
SSH.DetachUE()
elif action == 'Ping':
SSH.Ping()
elif action == 'Iperf':
SSH.Iperf()
elif action == 'Reboot_UE':
SSH.RebootUE()
elif action == 'Initialize_HSS':
SSH.InitializeHSS()
elif action == 'Terminate_HSS':
SSH.TerminateHSS()
elif action == 'Initialize_MME':
SSH.InitializeMME()
elif action == 'Terminate_MME':
SSH.TerminateMME()
elif action == 'Initialize_SPGW':
SSH.InitializeSPGW()
elif action == 'Terminate_SPGW':
SSH.TerminateSPGW()
else:
sys.exit('Invalid action')
SSH.CreateHtmlFooter()
CiTestObj.ShowTestID()
if not task_set_succeeded and not always_exec:
msg = f"skipping test due to prior error"
logging.warning(msg)
HTML.CreateHtmlTestRowQueue(msg, "SKIP", [])
break
try:
test_succeeded = ExecuteActionWithParam(action)
if not test_succeeded:
logging.error(f"test ID {test_case_id} action {action} failed ({test_succeeded}), skipping next tests")
task_set_succeeded = False
except Exception as e:
s = traceback.format_exc()
logging.error(f'while running CI, an exception occurred:\n{s}')
HTML.CreateHtmlTestRowQueue("N/A", 'KO', [f"CI test code encountered an exception:\n{s}"])
task_set_succeeded = False
break
if not task_set_succeeded:
logging.error('\u001B[1;37;41mScenario failed\u001B[0m')
HTML.CreateHtmlTabFooter(False)
sys.exit('Failed Scenario')
else:
logging.info('\u001B[1;37;42mScenario passed\u001B[0m')
HTML.CreateHtmlTabFooter(True)
elif re.match('^LoadParams$', mode, re.IGNORECASE):
pass
else:
Usage()
HELP.GenericHelp(CONST.Version)
sys.exit('Invalid mode')
sys.exit(0)
#!/bin/bash
# Modified by Karim Boutiba (karim.boutiba@eurecom.fr)
# mbim-set-ip script version 1.1
# Modified mbimcli IPv4 IPv6 parsing script based on parts of project https://github.com/grandcentrix/thinkpad-x260-modem-scripts
# Licensed under the Apache License, Version 2.0
# Modified by Jörgen Storvist, Techship http://www.techship.com
# Further details in the Techship FAQ sections on using MBIM and setting correct modes in cellular modules:
# https://techship.com/faq/how-to-set-up-a-simple-data-connection-over-the-mbim-interface-using-libmbim-and-driver-cdc-mbim-in-linux/
#
# Installation & Usage:
#
# Verify that the cellular module is exposing the MBIM interface over the USB interface.
# Use mbimcli to check status for the cellular connection and enter the PIN code if necessary.
# Configure mbim-network.conf and place in /etc/ (example file included in the archive, check link below for details).
# https://www.freedesktop.org/software/libmbim/man/latest/mbim-network.1.html
# Start the mbim data connection with command:
# mbim-network <MBIM-INTERFACE> start
# Ensure execution of mbim-set-ip script with sufficient system privileges
# ./mbim-set-ip <MBIM-INTERFACE> <NETWORK-INTERFACE>
#
# Examples:
# mbimcli -d /dev/cdc-wdm0 -p --query-device-caps
# mbimcli -d /dev/cdc-wdm0 -p --enter-pin=1234
# mbim-network /dev/cdc-wdm0 start
# ./mbim-set-ip /dev/cdc-wdm0 wwan0
#
# You should now be able to ping over the data connection.
# IP v4 ping: (only if IPv4 address was acquired from cellular module)
# ping -4 -I wwan0 8.8.8.8
# ping -4 -I wwan0 google.com
# IP v6 ping: (only if IPv6 address was acquired from cellular module)
# ping -6 -I wwan0 2001:4860:4860::8888
# ping -6 -I wwan0 google.com
#
# The script relies on the linux tools mbimcli, ip, systemd-resolve
# (Tested on libmbim 1.16.0 running on Ubuntu 18.10 cosmic (development) with kernel version 4.15.0)
# Details on mbim-network and mbimcli
# https://www.freedesktop.org/software/libmbim/man/latest/mbim-network.1.html
# https://www.freedesktop.org/software/libmbim/man/latest/mbimcli.1.html
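#
# Illustrative example (assumption, not captured from a real device): the parsing loop below
# expects the mbimcli --query-ip-configuration output to contain blocks roughly of the form
#
#   IPv4 configuration available: 'address, gateway, dns, mtu'
#        IP [0]: '10.134.203.177/30'
#       Gateway: '10.134.203.178'
#       DNS [0]: '10.134.203.177'
#           MTU: '1500'
#
# Only the quoted values are extracted; the exact layout may differ between libmbim versions.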
ipv4_addresses=()
ipv4_gateway=""
ipv4_dns=()
ipv4_mtu=""
ipv6_addresses=()
ipv6_gateway=""
ipv6_dns=()
ipv6_mtu=""
#CONTROL-IFACE
CONTROLDEV="$1"
#WWAN-IFACE
DEV="$2"
SESSION="$3"
echo $SESSION
echo "Requesting IPv4 and IPv6 information through mbimcli proxy:"
mbimcli -d $CONTROLDEV -p --query-ip-configuration
IPDATA=$(mbimcli -d $CONTROLDEV -p --query-ip-configuration=$SESSION)
function parse_ip {
# IP [0]: '10.134.203.177/30'
local line_re="IP \[([0-9]+)\]: '(.+)'"
local input=$1
if [[ $input =~ $line_re ]]; then
local ip_cnt=${BASH_REMATCH[1]}
local ip=${BASH_REMATCH[2]}
fi
echo "$ip"
}
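# e.g. (illustrative): parse_ip "    IP [0]: '10.134.203.177/30'" prints "10.134.203.177/30"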
function parse_gateway {
# Gateway: '10.134.203.178'
local line_re="Gateway: '(.+)'"
local input=$1
if [[ $input =~ $line_re ]]; then
local gw=${BASH_REMATCH[1]}
fi
echo "$gw"
}
function parse_dns {
# DNS [0]: '10.134.203.177'
local line_re="DNS \[([0-9]+)\]: '(.+)'"
local input=$1
if [[ $input =~ $line_re ]]; then
local dns_cnt=${BASH_REMATCH[1]}
local dns=${BASH_REMATCH[2]}
fi
echo "$dns"
}
function parse_mtu {
# MTU: '1500'
local line_re="MTU: '([0-9]+)'"
local input=$1
if [[ $input =~ $line_re ]]; then
local mtu=${BASH_REMATCH[1]}
fi
echo "$mtu"
}
while read -r line || [[ -n "$line" ]] ; do
[ -z "$line" ] && continue
case "$line" in
*"IPv4 configuration available: 'none'"*)
state="start"
continue
;;
*"IPv4 configuration available"*)
state="ipv4"
continue
;;
*"IPv6 configuration available: 'none'"*)
state="start"
continue
;;
*"IPv6 configuration available"*)
state="ipv6"
continue
;;
*)
;;
esac
case "$state" in
"ipv4")
case "$line" in
*"IP"*)
row=$(parse_ip "$line")
ipv4_addresses+=("$row")
continue
;;
*"Gateway"*)
row=$(parse_gateway "$line")
ipv4_gateway="$row"
continue
;;
*"DNS"*)
row=$(parse_dns "$line")
ipv4_dns+=("$row")
continue
;;
*"MTU"*)
row=$(parse_mtu "$line")
ipv4_mtu="$row"
continue
;;
*)
;;
esac
;;
"ipv6")
case "$line" in
*"IP"*)
row=$(parse_ip "$line")
ipv6_addresses+=("$row")
continue
;;
*"Gateway"*)
row=$(parse_gateway "$line")
ipv6_gateway="$row"
continue
;;
*"DNS"*)
row=$(parse_dns "$line")
ipv6_dns+=("$row")
continue
;;
*"MTU"*)
row=$(parse_mtu "$line")
ipv6_mtu="$row"
continue
;;
*)
continue
;;
esac
;;
*)
continue
;;
esac
done <<< "$IPDATA"
execfile=$(mktemp)
printf "ip link set $DEV down\n" >> $execfile
printf "ip addr flush dev $DEV \n" >> $execfile
printf "ip -6 addr flush dev $DEV \n" >> $execfile
printf "ip link set $DEV up\n" >> $execfile
if [[ "${#ipv4_addresses[@]}" > 0 ]]; then
printf "ip addr add %s dev $DEV broadcast +\n" "${ipv4_addresses[@]}" >> $execfile
printf "ip route add 192.168.61.0/24 via $ipv4_gateway dev $DEV\n" >> $execfile
if [ -n "$ipv4_mtu" ]; then
printf "ip link set mtu $ipv4_mtu dev $DEV \n" >> $execfile
fi
if [[ "${#ipv4_dns[@]}" > 0 ]]; then
printf "resolvectl -4 --interface=$DEV --set-dns=%s\n" "${ipv4_dns[@]}" >>$execfile
fi
fi
if [[ "${#ipv6_addresses[@]}" > 0 ]]; then
printf "ip -6 addr add %s dev $DEV\n" "${ipv6_addresses[@]}" >> $execfile
printf "ip -6 route add default via $ipv6_gateway dev $DEV\n" >> $execfile
if [ -n "$ipv6_mtu" ]; then
printf "ip -6 link set mtu $ipv6_mtu dev $DEV\n" >> $execfile
fi
if [[ "${#ipv6_dns[@]}" > 0 ]]; then
printf "systemd-resolve -6 --interface=$DEV --set-dns=%s\n" "${ipv6_dns[@]}" >>$execfile
fi
fi
echo "Applying the following network interface configurations:"
cat $execfile
bash $execfile
rm $execfile
echo "Network interface configurations completed."
set -x
sudo mbimcli -p -d /dev/cdc-wdm0 --set-radio-state=off
sleep 1
sudo mbimcli -p -d /dev/cdc-wdm0 --set-radio-state=on
sleep 2
sudo mbimcli -p -d /dev/cdc-wdm0 --attach-packet-service
sudo mbimcli -p -d /dev/cdc-wdm0 --connect=session-id=0,access-string=oai.ipv4,ip-type=ipv4
sudo /opt/mbim/mbim-set-ip.sh /dev/cdc-wdm0 wwan0 0
set -x
sudo mbimcli -p -d /dev/cdc-wdm0 --set-radio-state=off
version: '2'
services:
mysql:
container_name: oaicicd_mysql
restart: always
image: mysql:latest
environment:
MYSQL_ROOT_PASSWORD: 'ucZBc2XRYdvEm59F'
ports:
- "3307:3306"
volumes:
- /home/oaicicd/mysql/data:/var/lib/mysql
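# Typical usage (assumption, for illustration only):
#   docker-compose up -d                                   # start the oaicicd_mysql container
#   mysql -h <db-host> -P 3307 -u root -p oaicicd_tests    # connect from another host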
import pymysql
import sys
from datetime import datetime
#This is the script used to write the test results to the mysql DB
#Called by the Jenkins pipeline (Jenkinsfile)
#Must be located in /home/oaicicd/mysql on the database host
#Usage from Jenkinsfile:
#python3 /home/oaicicd/mysql/sql_connect.py ${JOB_NAME} ${params.eNB_MR} ${params.eNB_Branch} ${env.BUILD_ID} ${env.BUILD_URL} ${StatusForDb} ''
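#
#Expected table layout (assumption, for illustration only; the actual schema lives on the DB host):
#  CREATE TABLE test_results (
#    TEST VARCHAR(255), MR VARCHAR(64), BRANCH VARCHAR(128), BUILD VARCHAR(32),
#    BUILD_LINK TEXT, STATUS VARCHAR(32), DATE DATETIME
#  );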
class SQLConnect:
def __init__(self):
self.connection = pymysql.connect(
host='172.22.0.2',
user='root',
password = 'ucZBc2XRYdvEm59F',
db='oaicicd_tests',
port=3306
)
def put(self,TEST,MR,BRANCH,BUILD,BUILD_LINK,STATUS):
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
cur=self.connection.cursor()
cur.execute ('INSERT INTO test_results (TEST,MR,BRANCH,BUILD,BUILD_LINK,STATUS,DATE) VALUES (%s,%s,%s,%s,%s,%s,%s);' , (TEST, MR, BRANCH, BUILD, BUILD_LINK, STATUS, now))
self.connection.commit()
self.connection.close()
if __name__ == "__main__":
mydb=SQLConnect()
mydb.put(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4],sys.argv[5],sys.argv[6])
import argparse
import re
import subprocess
import sys
AUTH_SERVICE = 'registry.docker.io'
AUTH_SCOPE = 'repository:oaisoftwarealliance/oai-enb:pull'
def main() -> None:
args = _parse_args()
cmd = 'curl -fsSL "https://auth.docker.io/token?service=' + AUTH_SERVICE + '&scope=' + AUTH_SCOPE + '" 2>/dev/null | jq --raw-output ".token"'
token = subprocess.check_output(cmd, shell=True, universal_newlines=True)
token = str(token).strip()
cmd = 'curl -fsSL -H "Authorization: Bearer ' + token + '" "https://index.docker.io/v2/oaisoftwarealliance/oai-enb/tags/list" 2>/dev/null | jq .'
listOfTags = subprocess.check_output(cmd, shell=True, universal_newlines=True)
foundTag = False
for tag in listOfTags.split('\n'):
if re.search('"' + args.start_tag + '"', tag) is not None:
foundTag = True
if not foundTag:
print (args.start_tag)
sys.exit(0)
proposedVariants = ['a', 'b', 'c', 'd']
for variant in proposedVariants:
foundTag = False
currentVariant = variant
for tag in listOfTags.split('\n'):
if re.search('"' + args.start_tag + variant + '"', tag) is not None:
foundTag = True
break
if not foundTag:
break
if not foundTag:
print (args.start_tag + currentVariant)
def _parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description='Provides a unique new image tag for DockerHub')
parser.add_argument(
'--start_tag', '-st',
action='store',
required=True,
help='Proposed Starting Tag',
)
return parser.parse_args()
if __name__ == '__main__':
main()
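# Example invocation (illustrative; the script file name is an assumption):
#   python3 findAvailableDockerTag.py --start_tag 2023.w10
# prints "2023.w10" if that tag does not exist yet in the oaisoftwarealliance/oai-enb repository,
# otherwise the first unused variant among 2023.w10a, 2023.w10b, 2023.w10c, 2023.w10d.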
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
# Python for CI of OAI-eNB + COTS-UE
#
# Required Python Version
# Python 3.x
#
# Required Python Package
# pexpect
#---------------------------------------------------------------------
#-----------------------------------------------------------
# Import
#-----------------------------------------------------------
import sys # arg
import re # reg
import logging
import os
import time
import yaml
import cls_cmd
#-----------------------------------------------------------
# OAI Testing modules
#-----------------------------------------------------------
import sshconnection as SSH
import helpreadme as HELP
import constants as CONST
#-----------------------------------------------------------
# Class Declaration
#-----------------------------------------------------------
class RANManagement():
def __init__(self):
self.ranRepository = ''
self.ranBranch = ''
self.ranAllowMerge = False
self.ranCommitID = ''
self.ranTargetBranch = ''
self.eNBIPAddress = ''
self.eNBUserName = ''
self.eNBPassword = ''
self.eNBSourceCodePath = ''
self.eNB1IPAddress = ''
self.eNB1UserName = ''
self.eNB1Password = ''
self.eNB1SourceCodePath = ''
self.eNB2IPAddress = ''
self.eNB2UserName = ''
self.eNB2Password = ''
self.eNB2SourceCodePath = ''
self.Build_eNB_args = ''
self.backgroundBuild = False
self.backgroundBuildTestId = ['', '', '']
self.Build_eNB_forced_workspace_cleanup = False
self.Initialize_eNB_args = ''
self.imageKind = ''
self.air_interface = ['', '', ''] #changed from 'lte' to '' may lead to side effects in main
self.eNB_instance = 0
self.eNB_serverId = ['', '', '']
self.eNBLogFiles = ['', '', '']
self.eNBOptions = ['', '', '']
self.eNBmbmsEnables = [False, False, False]
self.eNBstatuses = [-1, -1, -1]
self.testCase_id = ''
self.epcPcapFile = ''
self.runtime_stats= ''
self.datalog_rt_stats={}
self.datalog_rt_stats_file='datalog_rt_stats.default.yaml'
self.USRPIPAddress = ''
#checkers from xml
self.ran_checkers={}
self.cmd_prefix = '' # prefix before {lte,nr}-softmodem
self.node = ''
self.command = ''
self.command_fail = False
#-----------------------------------------------------------
# RAN management functions
#-----------------------------------------------------------
def InitializeeNB(self, HTML, EPC):
if self.eNB_serverId[self.eNB_instance] == '0':
lIpAddr = self.eNBIPAddress
lUserName = self.eNBUserName
lPassWord = self.eNBPassword
lSourcePath = self.eNBSourceCodePath
elif self.eNB_serverId[self.eNB_instance] == '1':
lIpAddr = self.eNB1IPAddress
lUserName = self.eNB1UserName
lPassWord = self.eNB1Password
lSourcePath = self.eNB1SourceCodePath
elif self.eNB_serverId[self.eNB_instance] == '2':
lIpAddr = self.eNB2IPAddress
lUserName = self.eNB2UserName
lPassWord = self.eNB2Password
lSourcePath = self.eNB2SourceCodePath
if lIpAddr == '' or lUserName == '' or lPassWord == '' or lSourcePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
logging.debug('Starting eNB/gNB on server: ' + lIpAddr)
self.testCase_id = HTML.testCase_id
mySSH = SSH.SSHConnection()
cwd = os.getcwd()
# If an EPC is configured, run tshark on the EPC side to capture SCTP traffic between the EPC and the eNB
if EPC.IPAddress != "none":
localEpcIpAddr = EPC.IPAddress
localEpcUserName = EPC.UserName
localEpcPassword = EPC.Password
mySSH.open(localEpcIpAddr, localEpcUserName, localEpcPassword)
eth_interface = 'any'
fltr = 'sctp'
logging.debug('\u001B[1m Launching tshark on EPC on interface ' + eth_interface + ' with filter "' + fltr + '"\u001B[0m')
self.epcPcapFile = 'enb_' + self.testCase_id + '_s1log.pcap'
mySSH.command('echo ' + localEpcPassword + ' | sudo -S rm -f /tmp/' + self.epcPcapFile , '\$', 5)
mySSH.command('echo $USER; nohup sudo tshark -f "host ' + lIpAddr +'" -i ' + eth_interface + ' -f "' + fltr + '" -w /tmp/' + self.epcPcapFile + ' > /tmp/tshark.log 2>&1 &', localEpcUserName, 5)
mySSH.close()
mySSH.open(lIpAddr, lUserName, lPassWord)
mySSH.command('cd ' + lSourcePath, '\$', 5)
# Initialize_eNB_args usually starts with -O followed by the config file location in the repository
full_config_file = self.Initialize_eNB_args.replace('-O ','')
extra_options = ''
extIdx = full_config_file.find('.conf')
if (extIdx > 0):
extra_options = full_config_file[extIdx + 5:]
# if the T tracer option is on, compile and launch the T Tracer record tool
result = re.search('T_stdout', str(extra_options))
if result is not None:
logging.debug('\u001B[1m Compiling and launching T Tracer\u001B[0m')
mySSH.command('cd common/utils/T/tracer', '\$', 5)
mySSH.command('make', '\$', 10)
mySSH.command('echo $USER; nohup ./record -d ../T_messages.txt -o ' + lSourcePath + '/cmake_targets/enb_' + self.testCase_id + '_record.raw -ON -off VCD -off HEAVY -off LEGACY_GROUP_TRACE -off LEGACY_GROUP_DEBUG > ' + lSourcePath + '/cmake_targets/enb_' + self.testCase_id + '_record.log 2>&1 &', lUserName, 5)
mySSH.command('cd ' + lSourcePath, '\$', 5)
full_config_file = full_config_file[:extIdx + 5]
config_path, config_file = os.path.split(full_config_file)
else:
sys.exit('Insufficient Parameter')
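# Illustrative example (assumption): with Initialize_eNB_args = '-O ci-scripts/conf_files/enb.band7.conf --T_stdout 2',
# full_config_file starts as 'ci-scripts/conf_files/enb.band7.conf --T_stdout 2', extra_options becomes
# ' --T_stdout 2', and full_config_file is then truncated to 'ci-scripts/conf_files/enb.band7.conf'.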
ci_full_config_file = config_path + '/ci-' + config_file
rruCheck = False
result = re.search('^rru|^rcc|^du.band', str(config_file))
if result is not None:
rruCheck = True
# Make a copy and adapt to EPC / eNB IP addresses
mySSH.command('cp ' + full_config_file + ' ' + ci_full_config_file, '\$', 5)
localMmeIpAddr = EPC.MmeIPAddress
mySSH.command('sed -i -e \'s/CI_MME_IP_ADDR/' + localMmeIpAddr + '/\' ' + ci_full_config_file, '\$', 2);
mySSH.command('sed -i -e \'s/CI_ENB_IP_ADDR/' + lIpAddr + '/\' ' + ci_full_config_file, '\$', 2);
mySSH.command('sed -i -e \'s/CI_GNB_IP_ADDR/' + lIpAddr + '/\' ' + ci_full_config_file, '\$', 2);
mySSH.command('sed -i -e \'s/CI_RCC_IP_ADDR/' + self.eNBIPAddress + '/\' ' + ci_full_config_file, '\$', 2);
mySSH.command('sed -i -e \'s/CI_RRU1_IP_ADDR/' + self.eNB1IPAddress + '/\' ' + ci_full_config_file, '\$', 2);
mySSH.command('sed -i -e \'s/CI_RRU2_IP_ADDR/' + self.eNB2IPAddress + '/\' ' + ci_full_config_file, '\$', 2);
mySSH.command('sed -i -e \'s/CI_FR1_CTL_ENB_IP_ADDR/' + self.eNBIPAddress + '/\' ' + ci_full_config_file, '\$', 2);
self.eNBmbmsEnables[int(self.eNB_instance)] = False
mySSH.command('grep --colour=never enable_enb_m2 ' + ci_full_config_file, '\$', 2);
result = re.search('yes', mySSH.getBefore())
if result is not None:
self.eNBmbmsEnables[int(self.eNB_instance)] = True
logging.debug('\u001B[1m MBMS is enabled on this eNB\u001B[0m')
result = re.search('noS1', str(self.Initialize_eNB_args))
eNBinNoS1 = False
if result is not None:
eNBinNoS1 = True
logging.debug('\u001B[1m eNB is in noS1 configuration \u001B[0m')
# Launch eNB with the modified config file
mySSH.command('source oaienv', '\$', 5)
mySSH.command('cd cmake_targets', '\$', 5)
if self.air_interface[self.eNB_instance] == 'nr-softmodem':
mySSH.command('if [ -e rbconfig.raw ]; then echo ' + lPassWord + ' | sudo -S rm rbconfig.raw; fi', '\$', 5)
mySSH.command('if [ -e reconfig.raw ]; then echo ' + lPassWord + ' | sudo -S rm reconfig.raw; fi', '\$', 5)
# NOTE: we should check that the executable is present (in case the build went wrong)
# hack the UHD_RFNOC_DIR variable for gNB / N310 on the RHEL8 server:
# if the USRP address is in the xml, then we are using an Ethernet USRP (N3xx)
if self.air_interface[self.eNB_instance] == 'lte-softmodem':
gNB = False
else:
gNB = True
mySSH.command(f'echo "ulimit -c unlimited && {self.cmd_prefix} ./ran_build/build/{self.air_interface[self.eNB_instance]} -O {lSourcePath}/{ci_full_config_file} {extra_options}" > ./my-lte-softmodem-run{self.eNB_instance}.sh', '\$', 5)
mySSH.command('chmod 775 ./my-lte-softmodem-run' + str(self.eNB_instance) + '.sh', '\$', 5)
mySSH.command('echo ' + lPassWord + ' | sudo -S rm -Rf enb_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('echo $USER; nohup sudo -E stdbuf -o0 ./my-lte-softmodem-run' + str(self.eNB_instance) + '.sh > ' + lSourcePath + '/cmake_targets/enb_' + self.testCase_id + '.log 2>&1 &', lUserName, 10)
self.eNBLogFiles[int(self.eNB_instance)] = 'enb_' + self.testCase_id + '.log'
if extra_options != '':
self.eNBOptions[int(self.eNB_instance)] = extra_options
time.sleep(6)
doLoop = True
loopCounter = 20
enbDidSync = False
while (doLoop):
loopCounter = loopCounter - 1
if (loopCounter == 0):
# In case of T tracer recording, we may need to kill it
result = re.search('T_stdout', str(self.Initialize_eNB_args))
if result is not None:
mySSH.command('killall --signal SIGKILL record', '\$', 5)
mySSH.close()
doLoop = False
logging.error('\u001B[1;37;41m eNB/gNB logging system did not show "got sync"! \u001B[0m')
HTML.CreateHtmlTestRow(self.air_interface[self.eNB_instance] + ' -O ' + config_file + extra_options, 'KO', CONST.ALL_PROCESSES_OK)
# Stop the tshark capture on the EPC side before reporting the failure
localEpcIpAddr = EPC.IPAddress
localEpcUserName = EPC.UserName
localEpcPassword = EPC.Password
mySSH.open(localEpcIpAddr, localEpcUserName, localEpcPassword)
logging.debug('\u001B[1m Stopping tshark on EPC \u001B[0m')
mySSH.command('echo ' + localEpcPassword + ' | sudo -S killall --signal SIGKILL tshark', '\$', 5)
if self.epcPcapFile != '':
mySSH.command('echo ' + localEpcPassword + ' | sudo -S chmod 666 /tmp/' + self.epcPcapFile, '\$', 5)
mySSH.close()
if self.epcPcapFile != '':
copyin_res = mySSH.copyin(localEpcIpAddr, localEpcUserName, localEpcPassword, '/tmp/' + self.epcPcapFile, '.')
if (copyin_res == 0):
mySSH.copyout(lIpAddr, lUserName, lPassWord, self.epcPcapFile, lSourcePath + '/cmake_targets/.')
return False
else:
mySSH.command('stdbuf -o0 cat enb_' + self.testCase_id + '.log | grep -E --text --color=never -i "wait|sync|Starting|Started"', '\$', 4)
if rruCheck:
result = re.search('wait RUs', mySSH.getBefore())
else:
result = re.search('got sync|Starting F1AP at CU', mySSH.getBefore())
if result is None:
time.sleep(6)
else:
doLoop = False
enbDidSync = True
time.sleep(10)
rruCheck = False
result = re.search('^rru|^du.band', str(config_file))
if result is not None:
rruCheck = True
if enbDidSync and eNBinNoS1 and not rruCheck:
mySSH.command('ifconfig oaitun_enb1', '\$', 4)
mySSH.command('ifconfig oaitun_enb1', '\$', 4)
result = re.search('inet addr:1|inet 1', mySSH.getBefore())
if result is not None:
logging.debug('\u001B[1m oaitun_enb1 interface is mounted and configured\u001B[0m')
else:
logging.error('\u001B[1m oaitun_enb1 interface is either NOT mounted or NOT configured\u001B[0m')
if self.eNBmbmsEnables[int(self.eNB_instance)]:
mySSH.command('ifconfig oaitun_enm1', '\$', 4)
result = re.search('inet addr', mySSH.getBefore())
if result is not None:
logging.debug('\u001B[1m oaitun_enm1 interface is mounted and configured\u001B[0m')
else:
logging.error('\u001B[1m oaitun_enm1 interface is either NOT mounted or NOT configured\u001B[0m')
if enbDidSync:
self.eNBstatuses[int(self.eNB_instance)] = int(self.eNB_serverId[self.eNB_instance])
mySSH.close()
HTML.CreateHtmlTestRow(f'{self.cmd_prefix} {self.air_interface[self.eNB_instance]} -O {config_file} {extra_options}', 'OK', CONST.ALL_PROCESSES_OK)
logging.debug('\u001B[1m Initialize eNB/gNB Completed\u001B[0m')
return enbDidSync
def CheckeNBProcess(self, status_queue):
# At least the instance 0 SHALL be on!
if self.eNBstatuses[0] == 0:
lIpAddr = self.eNBIPAddress
lUserName = self.eNBUserName
lPassWord = self.eNBPassword
elif self.eNBstatuses[0] == 1:
lIpAddr = self.eNB1IPAddress
lUserName = self.eNB1UserName
lPassWord = self.eNB1Password
elif self.eNBstatuses[0] == 2:
lIpAddr = self.eNB2IPAddress
lUserName = self.eNB2UserName
lPassWord = self.eNB2Password
else:
lIpAddr = self.eNBIPAddress
lUserName = self.eNBUserName
lPassWord = self.eNBPassword
mySSH = SSH.SSHConnection()
mySSH.open(lIpAddr, lUserName, lPassWord)
if self.air_interface[self.eNB_instance] == '':
pattern = 'softmodem'
else:
pattern = self.air_interface[self.eNB_instance]
mySSH.command('stdbuf -o0 ps -aux | grep --color=never ' + pattern + ' | grep -v grep', '\$', 5)
result = re.search(pattern, mySSH.getBefore())
success = result is not None
if not success:
logging.debug('\u001B[1;37;41m eNB Process Not Found! \u001B[0m')
status_queue.put(CONST.ENB_PROCESS_FAILED)
else:
status_queue.put(CONST.ENB_PROCESS_OK)
mySSH.close()
return success
def TerminateeNB(self, HTML, EPC):
if self.eNB_serverId[self.eNB_instance] == '0':
lIpAddr = self.eNBIPAddress
lUserName = self.eNBUserName
lPassWord = self.eNBPassword
lSourcePath = self.eNBSourceCodePath
elif self.eNB_serverId[self.eNB_instance] == '1':
lIpAddr = self.eNB1IPAddress
lUserName = self.eNB1UserName
lPassWord = self.eNB1Password
lSourcePath = self.eNB1SourceCodePath
elif self.eNB_serverId[self.eNB_instance] == '2':
lIpAddr = self.eNB2IPAddress
lUserName = self.eNB2UserName
lPassWord = self.eNB2Password
lSourcePath = self.eNB2SourceCodePath
if lIpAddr == '' or lUserName == '' or lPassWord == '' or lSourcePath == '':
HELP.GenericHelp(CONST.Version)
sys.exit('Insufficient Parameter')
logging.debug('Stopping eNB/gNB on server: ' + lIpAddr)
mySSH = SSH.SSHConnection()
mySSH.open(lIpAddr, lUserName, lPassWord)
mySSH.command('cd ' + lSourcePath + '/cmake_targets', '\$', 5)
if self.air_interface[self.eNB_instance] == 'lte-softmodem':
nodeB_prefix = 'e'
else:
nodeB_prefix = 'g'
mySSH.command('stdbuf -o0 ps -aux | grep --color=never -e softmodem | grep -v grep', '\$', 5)
result = re.search('-softmodem', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + lPassWord + ' | sudo -S killall --signal SIGINT -r .*-softmodem || true', '\$', 5)
time.sleep(10)
mySSH.command('stdbuf -o0 ps -aux | grep --color=never -e softmodem | grep -v grep', '\$', 5)
result = re.search('-softmodem', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + lPassWord + ' | sudo -S killall --signal SIGKILL -r .*-softmodem || true', '\$', 5)
time.sleep(5)
mySSH.command('rm -f my-lte-softmodem-run' + str(self.eNB_instance) + '.sh', '\$', 5)
# stop tshark (only relevant if it was started for this eNB as configured in the xml; harmless otherwise)
logging.debug('\u001B[1m Stopping tshark on xNB \u001B[0m')
mySSH.command('echo ' + lPassWord + ' | sudo -S killall --signal SIGKILL tshark', '\$', 5)
time.sleep(1)
mySSH.close()
if EPC.IPAddress != "none" and EPC.IPAddress != '':
localEpcIpAddr = EPC.IPAddress
localEpcUserName = EPC.UserName
localEpcPassword = EPC.Password
logging.debug('\u001B[1m Stopping tshark on EPC (' + localEpcIpAddr + ') \u001B[0m')
mySSH.open(localEpcIpAddr, localEpcUserName, localEpcPassword)
mySSH.command('echo ' + localEpcPassword + ' | sudo -S killall --signal SIGKILL tshark', '\$', 5)
time.sleep(1)
if self.epcPcapFile != '':
mySSH.command('echo ' + localEpcPassword + ' | sudo -S chmod 666 /tmp/' + self.epcPcapFile, '\$', 5)
mySSH.copyin(localEpcIpAddr, localEpcUserName, localEpcPassword, '/tmp/' + self.epcPcapFile, '.')
mySSH.copyout(lIpAddr, lUserName, lPassWord, self.epcPcapFile, lSourcePath + '/cmake_targets/.')
mySSH.command('killall --signal SIGKILL record', '\$', 5)
mySSH.close()
# if T tracer was run with option 0 (no logs), analyze logs
# from textlog, otherwise do normal analysis (e.g., option 2)
result = re.search('T_stdout 0', str(self.Initialize_eNB_args))
if (result is not None):
logging.debug('\u001B[1m Replaying RAW record file\u001B[0m')
mySSH.open(lIpAddr, lUserName, lPassWord)
mySSH.command('cd ' + lSourcePath + '/common/utils/T/tracer/', '\$', 5)
enbLogFile = self.eNBLogFiles[int(self.eNB_instance)]
raw_record_file = enbLogFile.replace('.log', '_record.raw')
replay_log_file = enbLogFile.replace('.log', '_replay.log')
extracted_txt_file = enbLogFile.replace('.log', '_extracted_messages.txt')
extracted_log_file = enbLogFile.replace('.log', '_extracted_messages.log')
mySSH.command('./extract_config -i ' + lSourcePath + '/cmake_targets/' + raw_record_file + ' > ' + lSourcePath + '/cmake_targets/' + extracted_txt_file, '\$', 5)
mySSH.command('echo $USER; nohup ./replay -i ' + lSourcePath + '/cmake_targets/' + raw_record_file + ' > ' + lSourcePath + '/cmake_targets/' + replay_log_file + ' 2>&1 &', lUserName, 5)
mySSH.command('./textlog -d ' + lSourcePath + '/cmake_targets/' + extracted_txt_file + ' -no-gui -ON -full > ' + lSourcePath + '/cmake_targets/' + extracted_log_file, '\$', 5)
mySSH.close()
mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/' + extracted_log_file, '.')
logging.debug('\u001B[1m Analyzing eNB replay logfile \u001B[0m')
logStatus = self.AnalyzeLogFile_eNB(extracted_log_file, HTML, self.ran_checkers)
HTML.CreateHtmlTestRow(self.runtime_stats, 'OK', CONST.ALL_PROCESSES_OK)
self.eNBLogFiles[int(self.eNB_instance)] = ''
return True
else:
analyzeFile = False
if self.eNBLogFiles[int(self.eNB_instance)] != '':
analyzeFile = True
fileToAnalyze = self.eNBLogFiles[int(self.eNB_instance)]
self.eNBLogFiles[int(self.eNB_instance)] = ''
if analyzeFile:
#*stats.log files + pickle + png
mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/*stats.log', '.')
mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/*.pickle', '.')
mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/*.png', '.')
#
copyin_res = mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/' + fileToAnalyze, '.')
if (copyin_res == -1):
logging.debug('\u001B[1;37;41m Could not copy ' + nodeB_prefix + 'NB logfile to analyze it! \u001B[0m')
HTML.htmleNBFailureMsg='Could not copy ' + nodeB_prefix + 'NB logfile to analyze it!'
HTML.CreateHtmlTestRow('N/A', 'KO', CONST.ENB_PROCESS_NOLOGFILE_TO_ANALYZE)
self.eNBmbmsEnables[int(self.eNB_instance)] = False
return False
if self.eNB_serverId[self.eNB_instance] != '0':
#*stats.log files + pickle + png
#debug / tentative
mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, './nrL1_stats.log', self.eNBSourceCodePath + '/cmake_targets/')
mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, './nrMAC_stats.log', self.eNBSourceCodePath + '/cmake_targets/')
mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, './' + fileToAnalyze, self.eNBSourceCodePath + '/cmake_targets/')
logging.debug('\u001B[1m Analyzing ' + nodeB_prefix + 'NB logfile \u001B[0m ' + fileToAnalyze)
logStatus = self.AnalyzeLogFile_eNB(fileToAnalyze, HTML, self.ran_checkers)
if (logStatus < 0):
HTML.CreateHtmlTestRow('N/A', 'KO', logStatus)
#display rt stats for gNB only
if len(self.datalog_rt_stats)!=0 and nodeB_prefix == 'g':
HTML.CreateHtmlDataLogTable(self.datalog_rt_stats)
self.eNBmbmsEnables[int(self.eNB_instance)] = False
return False
else:
HTML.CreateHtmlTestRow(self.runtime_stats, 'OK', CONST.ALL_PROCESSES_OK)
else:
HTML.CreateHtmlTestRow(self.runtime_stats, 'OK', CONST.ALL_PROCESSES_OK)
#display rt stats for gNB only
if len(self.datalog_rt_stats)!=0 and nodeB_prefix == 'g':
HTML.CreateHtmlDataLogTable(self.datalog_rt_stats)
self.eNBmbmsEnables[int(self.eNB_instance)] = False
self.eNBstatuses[int(self.eNB_instance)] = -1
return True
def LogCollecteNB(self):
mySSH = SSH.SSHConnection()
# Copy back to the xNB server any logs from all the runs.
# Should also contain ping and iperf logs
absPath = os.path.abspath('.')
if absPath.count('ci-scripts') == 0:
os.chdir('./ci-scripts')
for x in os.listdir():
if x.endswith('.log') or x.endswith('.log.png'):
mySSH.copyout(self.eNBIPAddress, self.eNBUserName, self.eNBPassword, x, self.eNBSourceCodePath + '/cmake_targets/', silent=True, ignorePermDenied=True)
# Back to normal
mySSH.open(self.eNBIPAddress, self.eNBUserName, self.eNBPassword)
mySSH.command('cd ' + self.eNBSourceCodePath, '\$', 5)
mySSH.command('cd cmake_targets', '\$', 5)
mySSH.command('echo ' + self.eNBPassword + ' | sudo -S mv /tmp/enb_*.pcap .','\$',20)
mySSH.command('echo ' + self.eNBPassword + ' | sudo -S mv /tmp/gnb_*.pcap .','\$',20)
mySSH.command('echo ' + self.eNBPassword + ' | sudo -S rm -f enb.log.zip', '\$', 5)
mySSH.command('echo ' + self.eNBPassword + ' | sudo -S zip enb.log.zip *.log enb_*record.raw enb_*.pcap gnb_*.pcap enb_*txt physim_*.log *stats.log *monitor.pickle *monitor*.png ping*.log* iperf*.log log/*/*.log log/*/*.pcap', '\$', 60)
result = re.search('core.\d+', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.eNBPassword + ' | sudo -S zip enb.log.zip core* ran_build/build/{lte,nr}-softmodem', '\$', 60) # add core and executable to zip
mySSH.command('echo ' + self.eNBPassword + ' | sudo -S rm enb*.log core* enb_*record.raw enb_*.pcap gnb_*.pcap enb_*txt physim_*.log *stats.log *monitor.pickle *monitor*.png ping*.log* iperf*.log log/*/*.log log/*/*.pcap', '\$', 15)
mySSH.close()
def _analyzeUeRetx(self, rounds, checkers, regex):
if len(rounds) == 0 or len(checkers) == 0:
logging.warning(f'warning: rounds={rounds} checkers={checkers}')
return []
perc = list(0 for i in checkers) # results in %
stats = list(False for i in checkers) # status if succeeded
tmp = re.match(regex, rounds)
if tmp is None:
logging.error('_analyzeUeRetx: did not match regex for retx analysis')
return stats
retx_data = [float(x) for x in tmp.groups()]
for i in range(0, len(perc)):
# the case numerator > denominator with denominator == 0 is disregarded; it cannot happen in principle and would yield 0%
perc[i] = 0 if (retx_data[i] == 0) else 100 * retx_data[i + 1] / retx_data[i]
# a percentage above 100% or above the requirement counts as a failure
stats[i] = perc[i] <= 100 and perc[i] <= checkers[i]
return stats
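# Worked example (illustrative, not from a real run): with checkers = [10, 10, 10, 10] and a stats line
#   '... dlsch_rounds 1000/20/1/0, dlsch_errors 0'
# the regex captures (1000, 20, 1, 0, 0); perc = [2.0, 5.0, 0, 0] (each value relative to the previous
# round count) and stats = [True, True, True, True], since every percentage is <= 100 and <= its threshold.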
def AnalyzeLogFile_eNB(self, eNBlogFile, HTML, checkers={}):
if (not os.path.isfile(eNBlogFile)):
return -1
enb_log_file = open(eNBlogFile, 'r')
exitSignalReceived = False
foundAssertion = False
msgAssertion = ''
msgLine = 0
foundSegFault = False
foundRealTimeIssue = False
foundRealTimeIssue_cnt = 0
rrcSetupComplete = 0
rrcReleaseRequest = 0
rrcReconfigRequest = 0
rrcReconfigComplete = 0
rrcReestablishRequest = 0
rrcReestablishComplete = 0
rrcReestablishReject = 0
rlcDiscardBuffer = 0
rachCanceledProcedure = 0
uciStatMsgCount = 0
pdcpFailure = 0
ulschFailure = 0
ulschAllocateCCEerror = 0
uplinkSegmentsAborted = 0
ulschReceiveOK = 0
gnbRxTxWakeUpFailure = 0
gnbTxWriteThreadEnabled = False
cdrxActivationMessageCount = 0
dropNotEnoughRBs = 0
mbmsRequestMsg = 0
htmleNBFailureMsg = ''
isRRU = False
isSlave = False
slaveReceivesFrameResyncCmd = False
X2HO_state = CONST.X2_HO_REQ_STATE__IDLE
X2HO_inNbProcedures = 0
X2HO_outNbProcedures = 0
global_status = CONST.ALL_PROCESSES_OK
# Runtime statistics
runTime = ''
userTime = ''
systemTime = ''
maxPhyMemUsage = ''
nbContextSwitches = ''
#NSA FR1 check
NSA_RAPROC_PUSCH_check = 0
#dlsch and ulsch statistics (dictionary)
dlsch_ulsch_stats = {}
#real time statistics (dictionary)
real_time_stats = {}
#count "problem receiving samples" msg
pb_receiving_samples_cnt = 0
#count "removing UE" msg
removing_ue = 0
#count"X2AP-PDU"
x2ap_pdu = 0
#gnb specific log markers
gnb_markers ={'SgNBReleaseRequestAcknowledge': [],'FAILURE': [], 'scgFailureInformationNR-r15': [], 'SgNBReleaseRequest': [], 'Detected UL Failure on PUSCH':[]}
nodeB_prefix_found = False
RealTimeProcessingIssue = False
retx_status = {}
nrRrcRcfgComplete = 0
harqFeedbackPast = 0
showedByeMsg = False # last line is Bye. -> stopped properly
line_cnt=0 #log file line counter
for line in enb_log_file.readlines():
line_cnt+=1
# Detection of eNB/gNB from a container log
result = re.search('Starting eNB soft modem', str(line))
if result is not None:
nodeB_prefix_found = True
nodeB_prefix = 'e'
result = re.search('Starting gNB soft modem', str(line))
if result is not None:
nodeB_prefix_found = True
nodeB_prefix = 'g'
# Runtime statistics
result = re.search('Run time:' ,str(line))
if result is not None:
runTime = str(line).strip()
if runTime != '':
result = re.search('Time executing user inst', str(line))
if result is not None:
fields=line.split(':')
userTime = 'userTime : ' + fields[1].replace('\n','')
result = re.search('Time executing system inst', str(line))
if result is not None:
fields=line.split(':')
systemTime = 'systemTime : ' + fields[1].replace('\n','')
result = re.search('Max. Phy. memory usage:', str(line))
if result is not None:
fields=line.split(':')
maxPhyMemUsage = 'maxPhyMemUsage : ' + fields[1].replace('\n','')
result = re.search('Number of context switch.*process origin', str(line))
if result is not None:
fields=line.split(':')
nbContextSwitches = 'nbContextSwitches : ' + fields[1].replace('\n','')
if X2HO_state == CONST.X2_HO_REQ_STATE__IDLE:
result = re.search('target eNB Receives X2 HO Req X2AP_HANDOVER_REQ', str(line))
if result is not None:
X2HO_state = CONST.X2_HO_REQ_STATE__TARGET_RECEIVES_REQ
result = re.search('source eNB receives the X2 HO ACK X2AP_HANDOVER_REQ_ACK', str(line))
if result is not None:
X2HO_state = CONST.X2_HO_REQ_STATE__SOURCE_RECEIVES_REQ_ACK
if X2HO_state == CONST.X2_HO_REQ_STATE__TARGET_RECEIVES_REQ:
result = re.search('Received LTE_RRCConnectionReconfigurationComplete from UE', str(line))
if result is not None:
X2HO_state = CONST.X2_HO_REQ_STATE__TARGET_RRC_RECFG_COMPLETE
if X2HO_state == CONST.X2_HO_REQ_STATE__TARGET_RRC_RECFG_COMPLETE:
result = re.search('issue rrc_eNB_send_PATH_SWITCH_REQ', str(line))
if result is not None:
X2HO_state = CONST.X2_HO_REQ_STATE__TARGET_SENDS_SWITCH_REQ
if X2HO_state == CONST.X2_HO_REQ_STATE__TARGET_SENDS_SWITCH_REQ:
result = re.search('received path switch ack S1AP_PATH_SWITCH_REQ_ACK', str(line))
if result is not None:
X2HO_state = CONST.X2_HO_REQ_STATE__IDLE
X2HO_inNbProcedures += 1
if X2HO_state == CONST.X2_HO_REQ_STATE__SOURCE_RECEIVES_REQ_ACK:
result = re.search('source eNB receives the X2 UE CONTEXT RELEASE X2AP_UE_CONTEXT_RELEASE', str(line))
if result is not None:
X2HO_state = CONST.X2_HO_REQ_STATE__IDLE
X2HO_outNbProcedures += 1
if self.eNBOptions[int(self.eNB_instance)] != '':
res1 = re.search('max_rxgain (?P<requested_option>[0-9]+)', self.eNBOptions[int(self.eNB_instance)])
res2 = re.search('max_rxgain (?P<applied_option>[0-9]+)', str(line))
if res1 is not None and res2 is not None:
requested_option = int(res1.group('requested_option'))
applied_option = int(res2.group('applied_option'))
if requested_option == applied_option:
htmleNBFailureMsg += '<span class="glyphicon glyphicon-ok-circle"></span> Command line option(s) correctly applied <span class="glyphicon glyphicon-arrow-right"></span> ' + self.eNBOptions[int(self.eNB_instance)] + '\n\n'
else:
htmleNBFailureMsg += '<span class="glyphicon glyphicon-ban-circle"></span> Command line option(s) NOT applied <span class="glyphicon glyphicon-arrow-right"></span> ' + self.eNBOptions[int(self.eNB_instance)] + '\n\n'
result = re.search('Exiting OAI softmodem|Caught SIGTERM, shutting down', str(line))
if result is not None:
exitSignalReceived = True
result = re.search('[Ss]egmentation [Ff]ault', str(line))
if result is not None and not exitSignalReceived:
foundSegFault = True
result = re.search('[Cc]ore [dD]ump', str(line))
if result is not None and not exitSignalReceived:
foundSegFault = True
result = re.search('[Aa]ssertion', str(line))
if result is not None and not exitSignalReceived:
foundAssertion = True
result = re.search('LLL', str(line))
if result is not None and not exitSignalReceived:
foundRealTimeIssue = True
foundRealTimeIssue_cnt += 1
if foundAssertion and (msgLine < 3):
msgLine += 1
msgAssertion += str(line)
result = re.search('Setting function for RU', str(line))
if result is not None:
isRRU = True
if isRRU:
result = re.search('RU 0 is_slave=yes', str(line))
if result is not None:
isSlave = True
if isSlave:
result = re.search('Received RRU_frame_resynch command', str(line))
if result is not None:
slaveReceivesFrameResyncCmd = True
result = re.search('LTE_RRCConnectionSetupComplete from UE', str(line))
if result is not None:
rrcSetupComplete += 1
result = re.search('Generate LTE_RRCConnectionRelease|Generate RRCConnectionRelease', str(line))
if result is not None: rrcReleaseRequest += 1
result = re.search('Generate LTE_RRCConnectionReconfiguration', str(line))
if result is not None:
rrcReconfigRequest += 1
result = re.search('LTE_RRCConnectionReconfigurationComplete from UE rnti', str(line))
if result is not None:
rrcReconfigComplete += 1
result = re.search('LTE_RRCConnectionReestablishmentRequest', str(line))
if result is not None:
rrcReestablishRequest += 1
result = re.search('LTE_RRCConnectionReestablishmentComplete', str(line))
if result is not None:
rrcReestablishComplete += 1
result = re.search('LTE_RRCConnectionReestablishmentReject', str(line))
if result is not None:
rrcReestablishReject += 1
result = re.search('CDRX configuration activated after RRC Connection', str(line))
if result is not None:
cdrxActivationMessageCount += 1
result = re.search('uci->stat', str(line))
if result is not None:
uciStatMsgCount += 1
result = re.search('PDCP.*Out of Resources.*reason', str(line))
if result is not None:
pdcpFailure += 1
result = re.search('could not wakeup gNB rxtx process', str(line))
if result is not None:
gnbRxTxWakeUpFailure += 1
result = re.search('tx write thread ready', str(line))
if result is not None:
gnbTxWriteThreadEnabled = True
result = re.search('ULSCH in error in round|ULSCH 0 in error', str(line))
if result is not None:
ulschFailure += 1
result = re.search('ERROR ALLOCATING CCEs', str(line))
if result is not None:
ulschAllocateCCEerror += 1
result = re.search('uplink segment error.*aborted [1-9] segments', str(line))
if result is not None:
uplinkSegmentsAborted += 1
result = re.search('ULSCH received ok', str(line))
if result is not None:
ulschReceiveOK += 1
result = re.search('BAD all_segments_received', str(line))
if result is not None:
rlcDiscardBuffer += 1
result = re.search('Canceled RA procedure for UE rnti', str(line))
if result is not None:
rachCanceledProcedure += 1
result = re.search('dropping, not enough RBs', str(line))
if result is not None:
dropNotEnoughRBs += 1
if self.eNBmbmsEnables[int(self.eNB_instance)]:
result = re.search('MBMS USER-PLANE.*Requesting.*bytes from RLC', str(line))
if result is not None:
mbmsRequestMsg += 1
#FR1 NSA test : add new markers to make sure gNB is used
result = re.search('\[gNB [0-9]+\]\[RAPROC\] PUSCH with TC_RNTI 0x[0-9a-fA-F]+ received correctly, adding UE MAC Context RNTI 0x[0-9a-fA-F]+', str(line))
if result is not None:
NSA_RAPROC_PUSCH_check = 1
# Collect information on UE DLSCH and ULSCH statistics
keys = {'dlsch_rounds','ulsch_rounds'}
for k in keys:
result = re.search(k, line)
if result is None:
continue
result = re.search('UE (?:RNTI )?([0-9a-f]{4})', line)
if result is None:
logging.error(f'did not find RNTI while matching key {k}')
continue
rnti = result.group(1)
# remove 1) all characters before the relevant info (ulsch or dlsch) and 2) trailing characters
if not rnti in dlsch_ulsch_stats: dlsch_ulsch_stats[rnti] = {}
dlsch_ulsch_stats[rnti][k]=re.sub(r'^.*\]\s+', r'' , line.rstrip())
result = re.search('Received NR_RRCReconfigurationComplete from UE', str(line))
if result is not None:
nrRrcRcfgComplete += 1
result = re.search('HARQ feedback is in the past', str(line))
if result is not None:
harqFeedbackPast += 1
#count "problem receiving samples" msg
result = re.search('\[PHY\]\s+problem receiving samples', str(line))
if result is not None:
pb_receiving_samples_cnt += 1
#count "Removing UE" msg
result = re.search('\[MAC\]\s+Removing UE', str(line))
if result is not None:
removing_ue += 1
#count "X2AP-PDU"
result = re.search('X2AP-PDU', str(line))
if result is not None:
x2ap_pdu += 1
#gnb markers logging
for k in gnb_markers:
result = re.search(k, line)
if result is not None:
gnb_markers[k].append(line_cnt)
# check whether e/gNB log finishes with "Bye." message
showedByeMsg |= re.search(r'^Bye.\n', str(line), re.MULTILINE) is not None
enb_log_file.close()
#the following part takes the *_stats.log files as source (not the stdout log file)
#the datalog config file has to be loaded
datalog_rt_stats_file=self.datalog_rt_stats_file
if (os.path.isfile(datalog_rt_stats_file)):
yaml_file=datalog_rt_stats_file
elif (os.path.isfile('ci-scripts/'+datalog_rt_stats_file)):
yaml_file='ci-scripts/'+datalog_rt_stats_file
else:
logging.error("Datalog RT stats yaml file cannot be found")
sys.exit("Datalog RT stats yaml file cannot be found")
with open(yaml_file,'r') as f:
datalog_rt_stats = yaml.load(f,Loader=yaml.FullLoader)
rt_keys = datalog_rt_stats['Ref'] #we use the keys from the Ref field
if os.path.isfile('./nrL1_stats.log') and os.path.isfile('./nrMAC_stats.log'):
# don't use CI-nrL1_stats.log, as this will increase the processing time for
# no reason, we just need the last occurrence
nrL1_stats = open('./nrL1_stats.log', 'r')
nrMAC_stats = open('./nrMAC_stats.log', 'r')
for line in nrL1_stats.readlines():
for k in rt_keys:
result = re.search(k, line)
if result is not None:
# remove all characters before the relevant info and any trailing characters
tmp=re.match(rf'^.*?(\b{k}\b.*)',line.rstrip()) # rf-strings (Python >= 3.6) allow interpolating the variable k into the regex
if tmp!=None:
real_time_stats[k]=tmp.group(1)
for line in nrMAC_stats.readlines():
for k in rt_keys:
result = re.search(k, line)
if result is not None:
# remove all characters before the relevant info and any trailing characters
tmp=re.match(rf'^.*?(\b{k}\b.*)',line.rstrip()) # rf-strings (Python >= 3.6) allow interpolating the variable k into the regex
if tmp!=None:
real_time_stats[k]=tmp.group(1)
nrL1_stats.close()
nrMAC_stats.close()
else:
logging.debug("NR Stats files for RT analysis not found")
#stdout log file and stat log files analysis completed
logging.debug(' File analysis (stdout, stats) completed')
#post processing depending on the node type
if not nodeB_prefix_found:
if self.air_interface[self.eNB_instance] == 'lte-softmodem':
nodeB_prefix = 'e'
else:
nodeB_prefix = 'g'
if nodeB_prefix == 'g':
if ulschReceiveOK > 0:
statMsg = nodeB_prefix + 'NB showed ' + str(ulschReceiveOK) + ' "ULSCH received ok" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if gnbRxTxWakeUpFailure > 0:
statMsg = nodeB_prefix + 'NB showed ' + str(gnbRxTxWakeUpFailure) + ' "could not wakeup gNB rxtx process" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if gnbTxWriteThreadEnabled:
statMsg = nodeB_prefix + 'NB ran with TX Write thread enabled'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if nrRrcRcfgComplete > 0:
statMsg = nodeB_prefix + 'NB showed ' + str(nrRrcRcfgComplete) + ' "Received NR_RRCReconfigurationComplete from UE" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if harqFeedbackPast > 0:
statMsg = nodeB_prefix + 'NB showed ' + str(harqFeedbackPast) + ' "HARQ feedback is in the past" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
#FR1 NSA test : add new markers to make sure gNB is used
if NSA_RAPROC_PUSCH_check:
statMsg = '[RAPROC] PUSCH with TC_RNTI message check for ' + nodeB_prefix + 'NB : PASS '
htmlMsg = statMsg+'\n'
else:
statMsg = '[RAPROC] PUSCH with TC_RNTI message check for ' + nodeB_prefix + 'NB : FAIL or not relevant'
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
#problem receiving samples log
statMsg = '[PHY] problem receiving samples msg count = '+str(pb_receiving_samples_cnt)
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
#gnb markers
statMsg = 'logfile line count = ' + str(line_cnt)
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
if len(gnb_markers['SgNBReleaseRequestAcknowledge'])!=0:
statMsg = 'SgNBReleaseRequestAcknowledge = ' + str(len(gnb_markers['SgNBReleaseRequestAcknowledge'])) + ' occurrences, starting line ' + str(gnb_markers['SgNBReleaseRequestAcknowledge'][0])
else:
statMsg = 'SgNBReleaseRequestAcknowledge = ' + str(len(gnb_markers['SgNBReleaseRequestAcknowledge'])) + ' occurrences'
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
statMsg = 'FAILURE = ' + str(len(gnb_markers['FAILURE'])) + ' occurrences'
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
statMsg = 'Detected UL Failure on PUSCH = ' + str(len(gnb_markers['Detected UL Failure on PUSCH'])) + ' occurrences'
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
#ulsch and dlsch statistics and checkers
for ue in dlsch_ulsch_stats:
dlulstat = dlsch_ulsch_stats[ue]
#print statistics into html
statMsg=''
for key in dlulstat:
statMsg += dlulstat[key] + '\n'
logging.debug(dlulstat[key])
htmleNBFailureMsg += statMsg
retx_status[ue] = {}
dlcheckers = [] if 'd_retx_th' not in checkers else checkers['d_retx_th']
retx_status[ue]['dl'] = self._analyzeUeRetx(dlulstat['dlsch_rounds'], dlcheckers, r'^.*dlsch_rounds\s+(\d+)\/(\d+)\/(\d+)\/(\d+),\s+dlsch_errors\s+(\d+)')
ulcheckers = [] if 'u_retx_th' not in checkers else checkers['u_retx_th']
retx_status[ue]['ul'] = self._analyzeUeRetx(dlulstat['ulsch_rounds'], ulcheckers, r'^.*ulsch_rounds\s+(\d+)\/(\d+)\/(\d+)\/(\d+),\s+ulsch_errors\s+(\d+)')
#real time statistics
datalog_rt_stats['Data']={}
if len(real_time_stats)!=0: #check if dictionary is not empty
for k in real_time_stats:
tmp=re.match(r'^(?P<metric>.*):\s+(?P<avg>\d+\.\d+) us;\s+(?P<count>\d+);\s+(?P<max>\d+\.\d+) us;',real_time_stats[k])
if tmp is not None:
metric=tmp.group('metric')
avg=float(tmp.group('avg'))
max=float(tmp.group('max'))
count=int(tmp.group('count'))
datalog_rt_stats['Data'][metric]=["{:.0f}".format(avg),"{:.0f}".format(max),"{:d}".format(count),"{:.2f}".format(avg/datalog_rt_stats['Ref'][metric])]
#once all metrics are collected, store the data as a class attribute to build a dedicated HTML table afterward
self.datalog_rt_stats=datalog_rt_stats
#check if there is a fail => will render the test as failed
for k in datalog_rt_stats['Data']:
valnorm = float(datalog_rt_stats['Data'][k][3])
dev = datalog_rt_stats['DeviationThreshold'][k]
if valnorm > 1.0 + dev or valnorm < 1.0 - dev: # condition for fail : avg/ref deviates by more than "deviation threshold"
logging.debug(f'\u001B[1;30;43m normalized datalog_rt_stats metric {k}={valnorm} deviates by more than {dev}\u001B[0m')
RealTimeProcessingIssue = True
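# Illustrative example (assumption, metric name hypothetical): with Ref['feprx'] = 100 us, a measured
# average of 130 us and DeviationThreshold['feprx'] = 0.2, valnorm = 130/100 = 1.30 > 1.0 + 0.2,
# so the metric is flagged and the log analysis reports a real-time processing issue.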
else:
statMsg = 'No real time stats found in the log file\n'
logging.debug('No real time stats found in the log file')
htmleNBFailureMsg += statMsg
if not showedByeMsg:
logging.debug('\u001B[1;37;41m ' + nodeB_prefix + 'NB did not show "Bye." message at end, it likely did not stop properly! \u001B[0m')
htmleNBFailureMsg += 'No Bye. message found, did not stop properly\n'
global_status = CONST.ENB_SHUTDOWN_NO_BYE
else:
logging.debug('"Bye." message found at end.')
else:
#Removing UE log
statMsg = '[MAC] Removing UE msg count = '+str(removing_ue)
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
#X2AP-PDU log
statMsg = 'X2AP-PDU msg count = '+str(x2ap_pdu)
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
#nsa markers
statMsg = 'logfile line count = ' + str(line_cnt)
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
if len(gnb_markers['SgNBReleaseRequest'])!=0:
statMsg = 'SgNBReleaseRequest = ' + str(len(gnb_markers['SgNBReleaseRequest'])) + ' occurrences, starting line ' + str(gnb_markers['SgNBReleaseRequest'][0])
else:
statMsg = 'SgNBReleaseRequest = ' + str(len(gnb_markers['SgNBReleaseRequest'])) + ' occurrences'
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
statMsg = 'scgFailureInformationNR-r15 = ' + str(len(gnb_markers['scgFailureInformationNR-r15'])) + ' occurrences'
htmlMsg = statMsg+'\n'
logging.debug(statMsg)
htmleNBFailureMsg += htmlMsg
for ue in retx_status:
msg = f"retransmissions for UE {ue}: DL {retx_status[ue]['dl']} UL {retx_status[ue]['ul']}"
if False in retx_status[ue]['dl'] or False in retx_status[ue]['ul']:
msg = 'Failure: ' + msg
logging.error(f'\u001B[1;37;41m {msg}\u001B[0m')
htmleNBFailureMsg += f'{msg}\n'
global_status = CONST.ENB_RETX_ISSUE
else:
logging.debug(msg)
if RealTimeProcessingIssue:
logging.debug('\u001B[1;37;41m ' + nodeB_prefix + 'NB ended with real time processing issue! \u001B[0m')
htmleNBFailureMsg += 'Fail due to real time processing issue\n'
global_status = CONST.ENB_REAL_TIME_PROCESSING_ISSUE
if uciStatMsgCount > 0:
statMsg = nodeB_prefix + 'NB showed ' + str(uciStatMsgCount) + ' "uci->stat" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if pdcpFailure > 0:
statMsg = nodeB_prefix + 'NB showed ' + str(pdcpFailure) + ' "PDCP Out of Resources" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if ulschFailure > 0:
statMsg = nodeB_prefix + 'NB showed ' + str(ulschFailure) + ' "ULSCH in error in round" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if ulschAllocateCCEerror > 0:
statMsg = nodeB_prefix + 'NB showed ' + str(ulschAllocateCCEerror) + ' "eNB_dlsch_ulsch_scheduler(); ERROR ALLOCATING CCEs" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if uplinkSegmentsAborted > 0:
statMsg = nodeB_prefix + 'NB showed ' + str(uplinkSegmentsAborted) + ' "uplink segment error 0/2, aborted * segments" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if dropNotEnoughRBs > 0:
statMsg = 'eNB showed ' + str(dropNotEnoughRBs) + ' "dropping, not enough RBs" message(s)'
logging.debug('\u001B[1;30;43m ' + statMsg + ' \u001B[0m')
htmleNBFailureMsg += statMsg + '\n'
if rrcSetupComplete > 0:
rrcMsg = nodeB_prefix + 'NB completed ' + str(rrcSetupComplete) + ' RRC Connection Setup(s)'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
rrcMsg = ' -- ' + str(rrcSetupComplete) + ' were completed'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
if rrcReleaseRequest > 0:
rrcMsg = nodeB_prefix + 'NB requested ' + str(rrcReleaseRequest) + ' RRC Connection Release(s)'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
if rrcReconfigRequest > 0 or rrcReconfigComplete > 0:
rrcMsg = nodeB_prefix + 'NB requested ' + str(rrcReconfigRequest) + ' RRC Connection Reconfiguration(s)'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
rrcMsg = ' -- ' + str(rrcReconfigComplete) + ' were completed'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
if rrcReestablishRequest > 0 or rrcReestablishComplete > 0 or rrcReestablishReject > 0:
rrcMsg = nodeB_prefix + 'NB requested ' + str(rrcReestablishRequest) + ' RRC Connection Reestablishment(s)'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
rrcMsg = ' -- ' + str(rrcReestablishComplete) + ' were completed'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
rrcMsg = ' -- ' + str(rrcReestablishReject) + ' were rejected'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
if self.eNBmbmsEnables[int(self.eNB_instance)]:
if mbmsRequestMsg > 0:
rrcMsg = 'eNB requested ' + str(mbmsRequestMsg) + ' times the RLC for MBMS USER-PLANE'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
if X2HO_inNbProcedures > 0:
rrcMsg = 'eNB completed ' + str(X2HO_inNbProcedures) + ' X2 Handover Connection procedure(s)'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
if X2HO_outNbProcedures > 0:
rrcMsg = 'eNB completed ' + str(X2HO_outNbProcedures) + ' X2 Handover Release procedure(s)'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
if self.eNBOptions[int(self.eNB_instance)] != '':
res1 = re.search('drx_Config_present prSetup', self.eNBOptions[int(self.eNB_instance)])
if res1 is not None:
if cdrxActivationMessageCount > 0:
rrcMsg = 'eNB activated the CDRX Configuration for ' + str(cdrxActivationMessageCount) + ' time(s)'
logging.debug('\u001B[1;30;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
else:
rrcMsg = 'eNB did NOT ACTIVATE the CDRX Configuration'
logging.debug('\u001B[1;37;43m ' + rrcMsg + ' \u001B[0m')
htmleNBFailureMsg += rrcMsg + '\n'
if rachCanceledProcedure > 0:
rachMsg = nodeB_prefix + 'NB cancelled ' + str(rachCanceledProcedure) + ' RA procedure(s)'
logging.debug('\u001B[1;30;43m ' + rachMsg + ' \u001B[0m')
htmleNBFailureMsg += rachMsg + '\n'
if isRRU:
if isSlave:
if slaveReceivesFrameResyncCmd:
rruMsg = 'Slave RRU received the RRU_frame_resynch command from RAU'
logging.debug('\u001B[1;30;43m ' + rruMsg + ' \u001B[0m')
htmleNBFailureMsg += rruMsg + '\n'
else:
rruMsg = 'Slave RRU DID NOT receive the RRU_frame_resynch command from RAU'
logging.debug('\u001B[1;37;41m ' + rruMsg + ' \u001B[0m')
htmleNBFailureMsg += rruMsg + '\n'
global_status = CONST.ENB_PROCESS_SLAVE_RRU_NOT_SYNCED
if foundSegFault:
logging.debug('\u001B[1;37;41m ' + nodeB_prefix + 'NB ended with a Segmentation Fault! \u001B[0m')
global_status = CONST.ENB_PROCESS_SEG_FAULT
if foundAssertion:
logging.debug('\u001B[1;37;41m ' + nodeB_prefix + 'NB ended with an assertion! \u001B[0m')
htmleNBFailureMsg += msgAssertion
global_status = CONST.ENB_PROCESS_ASSERTION
if foundRealTimeIssue:
logging.debug('\u001B[1;37;41m ' + nodeB_prefix + 'NB faced real time issues! \u001B[0m')
htmleNBFailureMsg += nodeB_prefix + 'NB faced real time issues! COUNT = '+ str(foundRealTimeIssue_cnt) +' lines\n'
if rlcDiscardBuffer > 0:
rlcMsg = nodeB_prefix + 'NB RLC discarded ' + str(rlcDiscardBuffer) + ' buffer(s)'
logging.debug('\u001B[1;37;41m ' + rlcMsg + ' \u001B[0m')
htmleNBFailureMsg += rlcMsg + '\n'
global_status = CONST.ENB_PROCESS_REALTIME_ISSUE
HTML.htmleNBFailureMsg=htmleNBFailureMsg
# Runtime statistics for console output and HTML
if runTime != '':
logging.debug(runTime)
logging.debug(userTime)
logging.debug(systemTime)
logging.debug(maxPhyMemUsage)
logging.debug(nbContextSwitches)
self.runtime_stats='<pre>'+runTime + '\n'+ userTime + '\n' + systemTime + '\n' + maxPhyMemUsage + '\n' + nbContextSwitches+'</pre>'
return global_status