Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing with 1912 additions and 205 deletions
Active_eNBs = ( "eNB-Eurecom-LTEBox");
# Asn1_verbosity, choice in: none, info, annoying
Asn1_verbosity = "none";
eNBs =
(
{
# real_time choice in {hard, rt-preempt, no}
real_time = "no";
////////// Identification parameters:
eNB_ID = 0xe00;
cell_type = "CELL_MACRO_ENB";
eNB_name = "eNB-Eurecom-LTEBox";
// Tracking area code, 0x0000 and 0xfffe are reserved values
tracking_area_code = 1;
plmn_list = ( { mcc = 208; mnc = 93; mnc_length = 2; } );
tr_s_preference = "local_mac"
////////// Physical parameters:
component_carriers = (
{
node_function = "NGFI_RCC_IF4p5";
node_timing = "synch_to_ext_device";
node_synch_ref = 0;
frame_type = "FDD";
tdd_config = 3;
tdd_config_s = 0;
prefix_type = "NORMAL";
eutra_band = 7;
downlink_frequency = 2685000000L;
uplink_frequency_offset = -120000000;
Nid_cell = 0;
N_RB_DL = 50;
Nid_cell_mbsfn = 0;
nb_antenna_ports = 2;
nb_antennas_tx = 1;
nb_antennas_rx = 1;
tx_gain = 90;
rx_gain = 125;
pbch_repetition = "FALSE";
prach_root = 0;
prach_config_index = 0;
prach_high_speed = "DISABLE";
prach_zero_correlation = 1;
prach_freq_offset = 2;
pucch_delta_shift = 1;
pucch_nRB_CQI = 0;
pucch_nCS_AN = 0;
pucch_n1_AN = 0;
pdsch_referenceSignalPower = -27;
pdsch_p_b = 0;
pusch_n_SB = 1;
pusch_enable64QAM = "DISABLE";
pusch_hoppingMode = "interSubFrame";
pusch_hoppingOffset = 0;
pusch_groupHoppingEnabled = "ENABLE";
pusch_groupAssignment = 0;
pusch_sequenceHoppingEnabled = "DISABLE";
pusch_nDMRS1 = 1;
phich_duration = "NORMAL";
phich_resource = "ONESIXTH";
srs_enable = "DISABLE";
/* srs_BandwidthConfig =;
srs_SubframeConfig =;
srs_ackNackST =;
srs_MaxUpPts =;*/
pusch_p0_Nominal = -104;
pusch_alpha = "AL1";
pucch_p0_Nominal = -108;
msg3_delta_Preamble = 6;
pucch_deltaF_Format1 = "deltaF2";
pucch_deltaF_Format1b = "deltaF3";
pucch_deltaF_Format2 = "deltaF0";
pucch_deltaF_Format2a = "deltaF0";
pucch_deltaF_Format2b = "deltaF0";
rach_numberOfRA_Preambles = 64;
rach_preamblesGroupAConfig = "DISABLE";
/*
rach_sizeOfRA_PreamblesGroupA = ;
rach_messageSizeGroupA = ;
rach_messagePowerOffsetGroupB = ;
*/
rach_powerRampingStep = 4;
rach_preambleInitialReceivedTargetPower = -108;
rach_preambleTransMax = 10;
rach_raResponseWindowSize = 10;
rach_macContentionResolutionTimer = 48;
rach_maxHARQ_Msg3Tx = 4;
pcch_default_PagingCycle = 128;
pcch_nB = "oneT";
bcch_modificationPeriodCoeff = 2;
ue_TimersAndConstants_t300 = 1000;
ue_TimersAndConstants_t301 = 1000;
ue_TimersAndConstants_t310 = 1000;
ue_TimersAndConstants_t311 = 10000;
ue_TimersAndConstants_n310 = 20;
ue_TimersAndConstants_n311 = 1;
ue_TransmissionMode = 1;
mbms_dedicated_serving_cell = "DISABLE"
}
);
srb1_parameters :
{
# timer_poll_retransmit = (ms) [5, 10, 15, 20,... 250, 300, 350, ... 500]
timer_poll_retransmit = 80;
# timer_reordering = (ms) [0,5, ... 100, 110, 120, ... ,200]
timer_reordering = 35;
# timer_status_prohibit = (ms) [0,5, ... 250, 300, 350, ... ,500]
timer_status_prohibit = 0;
# poll_pdu = [4, 8, 16, 32 , 64, 128, 256, infinity(>10000)]
poll_pdu = 4;
# poll_byte = (kB) [25,50,75,100,125,250,375,500,750,1000,1250,1500,2000,3000,infinity(>10000)]
poll_byte = 99999;
# max_retx_threshold = [1, 2, 3, 4 , 6, 8, 16, 32]
max_retx_threshold = 4;
}
# ------- SCTP definitions
SCTP :
{
# Number of streams to use in input/output
SCTP_INSTREAMS = 2;
SCTP_OUTSTREAMS = 2;
};
////////// MME parameters:
mme_ip_address = ({ ipv4 = "127.0.0.3"; port = 36412; });
enable_measurement_reports = "no";
///X2
enable_x2 = "no";
t_reloc_prep = 1000; /* unit: millisecond */
tx2_reloc_overall = 2000; /* unit: millisecond */
t_dc_prep = 1000; /* unit: millisecond */
t_dc_overall = 2000; /* unit: millisecond */
////////// MCE parameters:
target_mce_m2_ip_address = ({ ipv4 = "127.0.0.7"; });
///M2
enable_enb_m2 = "yes";
mbms_configuration_data_list = (
{
mbsfn_sync_area = 0x0001;
mbms_service_area_list=(
{
mbms_service_area=0x0001;
}
);
}
);
NETWORK_INTERFACES :
{
ENB_IPV4_ADDRESS_FOR_S1_MME = "127.0.0.2/24";
ENB_IPV4_ADDRESS_FOR_S1U = "127.0.0.5/24";
ENB_PORT_FOR_S1U = 2152; # Spec 2152
ENB_IPV4_ADDRESS_FOR_X2C = "127.0.0.2/24";
ENB_PORT_FOR_X2C = 36422; # Spec 36422
ENB_IPV4_ADDRESS_FOR_M2C = "127.0.0.2/24";
ENB_PORT_FOR_M2C = 36443; # Spec 36443
};
}
);
MCEs = (
{
MCE_ID = 0xe00;
MCE_name = "MCE-Vicomtech-LTEBox";
//M2
enable_mce_m2 = "yes";
//M3
enable_mce_m3 = "yes";
target_mme_m3_ip_address = ({ ipv4 = "127.0.0.18"; });
NETWORK_INTERFACES :
{
MCE_INTERFACE_NAME_FOR_M2_ENB = "lo";
MCE_IPV4_ADDRESS_FOR_M2C = "127.0.0.7/24";
MCE_PORT_FOR_M2C = 36443; # Spec 36443
MCE_INTERFACE_NAME_FOR_M3_MME = "lo";
MCE_IPV4_ADDRESS_FOR_M3C = "127.0.0.3/24";
MCE_PORT_FOR_M3C = 36444; # Spec 36444
};
plnm:
{
mcc = 208;
mnc = 93;
mnc_length = 2;
};
mbms_sched_info :
{
mcch_update_time = 10;
mbms_area_config_list = (
{
common_sf_allocation_period = 2; #rf4(0) rf8(1) rf16(2) rf32(3) rf64(4) rf128(5) rf256(6)
mbms_area_id = 0;
pmch_config_list = (
{
allocated_sf_end=32;
data_mcs=14;
mch_scheduling_period = 0; #rf8(0)
mbms_session_list = (
{
#plnm + service_id ->tmgi
plnm:
{
mcc = 208;
mnc = 93;
mnc_length = 2;
}
service_id=0; #keep this always as 0 (workaround for TUN if)
lcid=6; #this must be properly defined lcid:6+service:0 -> rab_id:6
}
);
}
);
mbms_sf_config_list = (
{
radioframe_allocation_period=1; #n1(0) n2(1) n4(2) n8(3) n16(4) n32(5)
radioframe_alloocation_offset=0;
num_frame="oneFrame";
subframe_allocation=57; #xx111001
//num_frame="fourFrame";
//subframe_allocation=14548987; #
}
);
}
);
};
mcch_config_per_mbsfn_area = (
{
mbsfn_area = 0;
pdcch_length = 1; #s1(0), s2(1)
repetition_period = 0; #rf32(0), rf64(1), rf128(2), rf256(3)
offset = 0;
modification_period = 0; #rf512(0), rf1024(1)
subframe_allocation_info = 32; #BITSTRING (6bits -> one frame) xx100000
mcs = 1; #n2(0), n7(1), n13(2), n19(3)
}
);
#); #end mbms_scheduling_info
}
);
MMEs = (
{
MME_ID = 0xe00;
MME_name = "MME-MBMS-Vicomtech-LTEBox";
//M3
enable_mme_m3 = "yes";
NETWORK_INTERFACES :
{
MME_INTERFACE_NAME_FOR_M3_MCE = "lo";
MME_IPV4_ADDRESS_FOR_M3C = "127.0.0.18/24";
MME_PORT_FOR_M3C = 36444; # Spec 36444
};
}
);
MACRLCs = (
{
num_cc = 1;
tr_s_preference = "local_L1";
tr_n_preference = "local_RRC";
}
);
L1s = (
{
num_cc = 1;
tr_n_preference = "local_mac";
}
);
RUs = (
{
local_if_name = "lo";
remote_address = "127.0.0.2";
local_address = "127.0.0.1";
local_portc = 50000;
remote_portc = 50000;
local_portd = 50001;
remote_portd = 50001;
local_rf = "no"
tr_preference = "udp_if4p5"
nb_tx = 2
nb_rx = 2
att_tx = 0
att_rx = 0;
eNB_instances = [0];
is_slave = "no";
}
);
THREAD_STRUCT = (
{
#three configs for the level of parallelism: "PARALLEL_SINGLE_THREAD", "PARALLEL_RU_L1_SPLIT", or "PARALLEL_RU_L1_TRX_SPLIT"
parallel_config = "PARALLEL_SINGLE_THREAD";
#two options for the worker: "WORKER_DISABLE" or "WORKER_ENABLE"
worker_config = "WORKER_ENABLE";
}
);
log_config = {
global_log_level ="info";
hw_log_level ="info";
phy_log_level ="info";
mac_log_level ="info";
rlc_log_level ="info";
pdcp_log_level ="info";
rrc_log_level ="info";
};
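The eNB block above points its M2 endpoint at the MCE (target_mce_m2_ip_address = 127.0.0.7, ENB_PORT_FOR_M2C = 36443), and the MCE block advertises the matching MCE_IPV4_ADDRESS_FOR_M2C and MCE_PORT_FOR_M2C. A minimal consistency check over such a file, assuming the third-party libconf package can parse it as standard libconfig syntax (hypothetical helper, not part of the OAI tree):

# Hypothetical sanity check for the eNB/MCE M2 settings shown above.
# Assumes "pip install libconf" and a file that parses as libconfig syntax.
import libconf

def check_m2_consistency(path):
    with open(path) as f:
        cfg = libconf.load(f)
    enb = cfg['eNBs'][0]
    mce = cfg['MCEs'][0]
    enb_m2_target = enb['target_mce_m2_ip_address'][0]['ipv4']
    mce_m2_addr = mce['NETWORK_INTERFACES']['MCE_IPV4_ADDRESS_FOR_M2C'].split('/')[0]
    enb_m2_port = enb['NETWORK_INTERFACES']['ENB_PORT_FOR_M2C']
    mce_m2_port = mce['NETWORK_INTERFACES']['MCE_PORT_FOR_M2C']
    assert enb_m2_target == mce_m2_addr, "eNB M2 target does not match MCE M2C address"
    assert enb_m2_port == mce_m2_port, "eNB and MCE disagree on the M2C port"

check_m2_consistency('enb.band7.tm1.mbms.conf')   # hypothetical file name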
@@ -172,18 +172,20 @@ eNBs =
////////// MME parameters:
mme_ip_address = ( { ipv4 = "CI_MME_IP_ADDR";
ipv6 = "192:168:30::17";
active = "yes";
preference = "ipv4";
}
);
mme_ip_address = ({ ipv4 = "CI_MME_IP_ADDR"; port = 36412; });
enable_measurement_reports = "no";
///X2
enable_x2 = "no";
t_reloc_prep = 1000; /* unit: millisecond */
tx2_reloc_overall = 2000; /* unit: millisecond */
t_dc_prep = 1000; /* unit: millisecond */
t_dc_overall = 2000; /* unit: millisecond */
NETWORK_INTERFACES :
{
ENB_INTERFACE_NAME_FOR_S1_MME = "ens3";
ENB_IPV4_ADDRESS_FOR_S1_MME = "CI_ENB_IP_ADDR";
ENB_INTERFACE_NAME_FOR_S1U = "ens3";
ENB_IPV4_ADDRESS_FOR_S1U = "CI_ENB_IP_ADDR";
ENB_PORT_FOR_S1U = 2152; # Spec 2152
ENB_IPV4_ADDRESS_FOR_X2C = "CI_ENB_IP_ADDR";
@@ -196,13 +198,12 @@ eNBs =
MACRLCs = (
{
num_cc = 1;
local_s_if_name = "lo:";
remote_s_address = "127.0.0.1";
local_s_address = "127.0.0.2";
local_s_portc = 50001;
remote_s_portc = 50000;
local_s_portd = 50011;
remote_s_portd = 50010;
remote_s_address = "CI_UE_IP_ADDR";
local_s_address = "CI_ENB_IP_ADDR";
local_s_portc = 50001;
remote_s_portc = 50000;
local_s_portd = 50011;
remote_s_portd = 50010;
tr_s_preference = "nfapi";
tr_n_preference = "local_RRC";
}
@@ -211,7 +212,7 @@ MACRLCs = (
THREAD_STRUCT = (
{
#three configs for the level of parallelism: "PARALLEL_SINGLE_THREAD", "PARALLEL_RU_L1_SPLIT", or "PARALLEL_RU_L1_TRX_SPLIT"
parallel_config = "PARALLEL_RU_L1_TRX_SPLIT";
parallel_config = "PARALLEL_RU_L1_SPLIT";
#two options for the worker: "WORKER_DISABLE" or "WORKER_ENABLE"
worker_config = "WORKER_ENABLE";
}
@@ -220,17 +221,10 @@ THREAD_STRUCT = (
log_config =
{
global_log_level ="info";
global_log_verbosity ="medium";
hw_log_level ="info";
hw_log_verbosity ="medium";
phy_log_level ="info";
phy_log_verbosity ="medium";
mac_log_level ="info";
mac_log_verbosity ="high";
rlc_log_level ="info";
rlc_log_verbosity ="medium";
pdcp_log_level ="info";
pdcp_log_verbosity ="medium";
rrc_log_level ="info";
rrc_log_verbosity ="medium";
};
RUs = (
{
local_if_name = "enp1s0";
remote_address = "CI_RCC_IP_ADDR";
local_address = "CI_ENB_IP_ADDR";
local_portc = 50002;
remote_portc = 50002;
local_portd = 50003;
remote_portd = 50003;
local_rf = "yes"
tr_preference = "udp_if4p5";
nb_tx = 1;
nb_rx = 1;
max_pdschReferenceSignalPower = -12;
max_rxgain = 100;
bands = [38];
is_slave = "no";
ota_sync_enabled = "yes";
}
);
THREAD_STRUCT = (
{
#three configs for the level of parallelism: "PARALLEL_SINGLE_THREAD", "PARALLEL_RU_L1_SPLIT", or "PARALLEL_RU_L1_TRX_SPLIT"
parallel_config = "PARALLEL_SINGLE_THREAD";
#two options for the worker: "WORKER_DISABLE" or "WORKER_ENABLE"
worker_config = "WORKER_ENABLE";
}
);
log_config = {
global_log_level ="info";
hw_log_level ="info";
phy_log_level ="info";
mac_log_level ="info";
rlc_log_level ="info";
pdcp_log_level ="info";
rrc_log_level ="info";
};
RUs = (
{
local_if_name = "enp1s0";
remote_address = "CI_RCC_IP_ADDR";
local_address = "CI_ENB_IP_ADDR";
local_portc = 50010;
remote_portc = 50010;
local_portd = 50011;
remote_portd = 50011;
local_rf = "yes"
tr_preference = "udp_if4p5";
nb_tx = 1;
nb_rx = 1;
max_pdschReferenceSignalPower = -12;
max_rxgain = 100;
bands = [38];
is_slave = "yes";
ota_sync_enabled = "yes";
}
);
THREAD_STRUCT = (
{
#three configs for the level of parallelism: "PARALLEL_SINGLE_THREAD", "PARALLEL_RU_L1_SPLIT", or "PARALLEL_RU_L1_TRX_SPLIT"
parallel_config = "PARALLEL_SINGLE_THREAD";
#two options for the worker: "WORKER_DISABLE" or "WORKER_ENABLE"
worker_config = "WORKER_ENABLE";
}
);
log_config = {
global_log_level ="info";
hw_log_level ="info";
phy_log_level ="info";
mac_log_level ="info";
rlc_log_level ="info";
pdcp_log_level ="info";
rrc_log_level ="info";
};
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
# Python for CI of OAI-eNB + COTS-UE
#
# Required Python Version
# Python 3.x
#
# Required Python Package
# pexpect
#---------------------------------------------------------------------
#-----------------------------------------------------------
# Version
#-----------------------------------------------------------
Version = '0.2'
#-----------------------------------------------------------
# Constants
#-----------------------------------------------------------
ALL_PROCESSES_OK = 0
ENB_PROCESS_FAILED = -1
ENB_PROCESS_OK = +1
ENB_PROCESS_SEG_FAULT = -11
ENB_PROCESS_ASSERTION = -12
ENB_PROCESS_REALTIME_ISSUE = -13
ENB_PROCESS_NOLOGFILE_TO_ANALYZE = -14
ENB_PROCESS_SLAVE_RRU_NOT_SYNCED = -15
ENB_REAL_TIME_PROCESSING_ISSUE = -16
ENB_RETX_ISSUE = -17
ENB_SHUTDOWN_NO_BYE = -18
HSS_PROCESS_FAILED = -2
HSS_PROCESS_OK = +2
MME_PROCESS_FAILED = -3
MME_PROCESS_OK = +3
SPGW_PROCESS_FAILED = -4
SPGW_PROCESS_OK = +4
UE_IP_ADDRESS_ISSUE = -5
OAI_UE_PROCESS_NOLOGFILE_TO_ANALYZE = -20
OAI_UE_PROCESS_COULD_NOT_SYNC = -21
OAI_UE_PROCESS_ASSERTION = -22
OAI_UE_PROCESS_FAILED = -23
OAI_UE_PROCESS_NO_TUNNEL_INTERFACE = -24
OAI_UE_PROCESS_SEG_FAULT = -25
OAI_UE_PROCESS_NO_MBMS_MSGS = -26
OAI_UE_PROCESS_OK = +6
INVALID_PARAMETER = -50
PHYSIM_IMAGE_ABSENT = -60
OC_LOGIN_FAIL = -61
OC_PROJECT_FAIL = -62
OC_IS_FAIL = -63
OC_PHYSIM_DEPLOY_FAIL = -64
UE_STATUS_DETACHED = 0
UE_STATUS_DETACHING = 1
UE_STATUS_ATTACHING = 2
UE_STATUS_ATTACHED = 3
X2_HO_REQ_STATE__IDLE = 0
X2_HO_REQ_STATE__TARGET_RECEIVES_REQ = 1
X2_HO_REQ_STATE__TARGET_RRC_RECFG_COMPLETE = 2
X2_HO_REQ_STATE__TARGET_SENDS_SWITCH_REQ = 3
X2_HO_REQ_STATE__SOURCE_RECEIVES_REQ_ACK = 10
# placeholder for a "don't care" password to gradually transition to
# passwordless CI
CI_NO_PASSWORD = "CIPASSWORDDONTCARE"
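The constants above are plain module-level integers, so CI logs would otherwise only show the raw numeric status codes. A small sketch of a reverse lookup for reporting (hypothetical helper, assuming the constants live in a module named constants, as the import section later in this changeset suggests):

# Hypothetical helper: map a numeric status code back to its constant name(s).
import constants as CONST   # the module defined above (imported as CONST elsewhere in the CI scripts)

def status_name(code):
    names = [n for n, v in vars(CONST).items()
             if n.isupper() and isinstance(v, int) and v == code]
    return '/'.join(names) if names else f'UNKNOWN({code})'

print(status_name(CONST.ENB_PROCESS_SEG_FAULT))   # -> ENB_PROCESS_SEG_FAULT
print(status_name(0))                             # -> ALL_PROCESSES_OK/UE_STATUS_DETACHED/X2_HO_REQ_STATE__IDLE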
@@ -24,35 +24,32 @@
// section for "valid" memory leaks: the related functions are allocators and
// the caller is responsible of freeing the memory. cppcheck has a mechanism
// to check more accuretaly this, by defining callers responsible of freeing
// but tools like valgring might be more suitable
// but tools like valgring might be more suitable
//
//-----------------------------------------------------------------------------
// suppress error about keysP memory leak, free must be done by calling func
memleak:common/utils/hashtable/obj_hashtable.c
//-----------------------------------------------------------------------------
// suppress error about keys memory leak, free must be done by calling func
memleak:openair2/UTIL/OMG/omg_hashtable.c
//-----------------------------------------------------------------------------
// suppress error about data memory leak. This is the buffer where
// _emm_as_encode function creates the encoded buffer
//
memleak:openair3/NAS/UE/EMM/SAP/emm_as.c
memleak:openair1/PHY/INIT/nr_init_ue.c
//-----------------------------------------------------------------------------
//*****************************************************************************
// section for files not used in oai exec's included in CI.
// Possible candidates for removal; otherwise they should be documented and updated
// for project rules enforcement
// ----------------------------------------------------------------------------
// likely sources for test programs, maintained?
invalidPrintfArgType_sint:openair1/PHY/CODING/TESTBENCH/ltetest.c
memleak:openair1/PHY/CODING/TESTBENCH/ltetest.c
invalidPrintfArgType_sint:openair1/PHY/CODING/TESTBENCH/pdcch_test.c
//
//-----------------------------------------------------------------------------
// oaisim deprecated, remove?
doubleFree:openair3/TEST/oaisim_mme_list_benchmark.c
//
//-----------------------------------------------------------------------------
// is the itti analyzer deprecated?
nullPointer:common/utils/itti_analyzer/itti_analyzer.c
nullPointerRedundantCheck:common/utils/itti_analyzer/libbuffers/buffers.c
@@ -72,14 +69,29 @@ uninitvar:openair2/UTIL/OTG/otg_rx_socket.c
// iteration of the loop.
nullPointer:common/utils/T/local_tracer.c:243
//-----------------------------------------------------------------------------
// once again cppcheck does not understand that fds is initialized in the
// first iteration of the loop
nullPointer:common/utils/T/tracer/multi.c:264
nullPointer:common/utils/T/tracer/multi.c:265
//-----------------------------------------------------------------------------
// this file is used for testing the RLC V2 implementation, this error is
// not a problem, the programmer has to know what she does when writing
// the tests
arrayIndexOutOfBounds:openair2/LAYER2/rlc_v2/tests/test.c:401
//
//-----------------------------------------------------------------------------
// this file is used for testing the NR RLC implementation, this error is
// not a problem, the programmer has to know what she does when writing
// the tests
arrayIndexOutOfBounds:openair2/LAYER2/nr_rlc/tests/test.c:451
//
//-----------------------------------------------------------------------------
// cppcheck does not understand the different lengths of arrays
arrayIndexOutOfBounds:openair1/SIMULATION/TOOLS/random_channel.c:705
arrayIndexOutOfBounds:openair1/SIMULATION/TOOLS/random_channel.c:706
//*****************************************************************************
//
//
// True problems we don't know how to fix; suppression is commented out,
// as these kinds of problems either need to be fixed or can be suppressed
// when fully understood
//-----------------------------------------------------------------------------
@@ -89,13 +101,13 @@ nullPointer:common/utils/T/tracer/multi.c:265
// memleak:nfapi/open-nFAPI/pnf/src/pnf_p7_interface.c
// memleak:nfapi/open-nFAPI/vnf/src/vnf_p7_interface.c
//-----------------------------------------------------------------------------
// maybe security_data->kenb.value is released from calling functions. But even
// when, for test, freeing it before returning from emm_proc_security_mode_command
// which does the allocation, cppcheck complains. So something might be wrong...
// memleak:openair3/NAS/UE/EMM/SecurityModeControl.c
//-----------------------------------------------------------------------------
// when used, nobody but the original developer can guess if sn_data_cnf is set or not
// cppcheck found that in some cases it is not, code needs cleanup before fixing that...
// uninitvar:openair2/LAYER2/RLC/AM_v9.3.0/rlc_am_status_report.c
//*****************************************************************************
// *INDENT-ON*
#!/bin/bash
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
function create_usage {
echo "OAI CI VM script"
echo " Original Author: Raphael Defosseux"
echo " Requirements:"
echo " -- uvtool uvtool-libvirt apt-cacher"
echo " -- xenial image already synced"
echo " Default:"
echo " -- eNB with USRP"
echo ""
echo "Usage:"
echo "------"
echo " oai-ci-vm-tool create [OPTIONS]"
echo ""
echo "Mandatory Options:"
echo "--------"
echo " --job-name #### OR -jn ####"
echo " Specify the name of the Jenkins job."
echo ""
echo " --build-id #### OR -id ####"
echo " Specify the build ID of the Jenkins job."
echo ""
variant_usage
echo " Specify the variant to build."
echo ""
echo " --help OR -h"
echo " Print this help message."
echo ""
}
function acquire_vm_create_lock {
local FlockFile="/tmp/vmclone.lck"
local unlocked="0"
touch ${FlockFile} 2>/dev/null
if [[ $? -ne 0 ]]
then
echo "Cannot access lock file ${FlockFile}"
exit 2
fi
while [ $unlocked -eq 0 ]
do
exec 5>${FlockFile}
flock -nx 5
if [[ $? -ne 0 ]]
then
echo "Another instance of VM creation is running"
sleep 10
else
unlocked="1"
fi
done
chmod 666 ${FlockFile} 2>/dev/null
}
function release_vm_create_lock {
local FlockFile="/tmp/vmclone.lck"
rm -Rf ${FlockFile}
}
function create_vm {
echo "############################################################"
echo "OAI CI VM script"
echo "############################################################"
echo "VM_NAME = $VM_NAME"
echo "VM_MEMORY = $VM_MEMORY MBytes"
echo "VM_CPU = $VM_CPU"
echo "############################################################"
echo "Creating VM ($VM_NAME) on Ubuntu Cloud Image base"
echo "############################################################"
acquire_vm_create_lock
uvt-kvm create $VM_NAME release=xenial --memory $VM_MEMORY --cpu $VM_CPU --unsafe-caching --template ci-scripts/template-host.xml
echo "Waiting for VM to be started"
uvt-kvm wait $VM_NAME --insecure
VM_IP_ADDR=`uvt-kvm ip $VM_NAME`
echo "$VM_NAME has for IP addr = $VM_IP_ADDR"
release_vm_create_lock
}
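acquire_vm_create_lock serialises concurrent VM creations: it takes a non-blocking exclusive flock on /tmp/vmclone.lck and retries every 10 seconds until it succeeds, and release_vm_create_lock simply removes the lock file. The same pattern expressed in Python, purely as an illustration of the locking logic (not used by the CI scripts):

# Sketch of the exclusive-lock-with-retry pattern used by acquire_vm_create_lock.
import fcntl
import time

def acquire_create_lock(path="/tmp/vmclone.lck", retry_s=10):
    fd = open(path, "w")                                    # equivalent of: exec 5>${FlockFile}
    while True:
        try:
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)  # equivalent of: flock -nx 5
            return fd                                       # keep it open; closing it releases the lock
        except OSError:
            print("Another instance of VM creation is running")
            time.sleep(retry_s)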
#this is a configuration file
#used to build real time processing statistics
#for 5G NR phy test (gNB terminate)
Title : Processing Time (us) from datalog_rt_stats.100.2x2.yaml
ColNames :
- Metric
- Average; Max; Count
- Average vs Reference Deviation (Reference Value; Acceptability Deviation Threshold)
Ref :
feprx : 150.0
feptx_prec : 0.0
feptx_ofdm : 60.0
feptx_total : 150.0
L1 Tx processing : 530.0
DLSCH encoding : 220.0
L1 Rx processing : 530.0
PUSCH inner-receiver : 360.0
Schedule Response : 3.0
DL & UL scheduling timing : 17.0
UL Indication : 3.0
Slot Indication : 17.0
DeviationThreshold :
feprx : 0.25
feptx_prec : 0.25
feptx_ofdm : 0.25
feptx_total : 0.25
L1 Tx processing : 0.25
DLSCH encoding : 0.25
L1 Rx processing : 0.25
PUSCH inner-receiver : 0.25
Schedule Response : 1.00
DL & UL scheduling timing : 0.25
UL Indication : 1.00
Slot Indication : 0.50
#this is a configuration file
#used to build real time processing statistics
#for 5G NR phy test (gNB terminate)
Title : Processing Time (us) from datalog_rt_stats.1x1.60.yaml
ColNames :
- Metric
- Average; Max; Count
- Average vs Reference Deviation (Reference Value; Acceptability Deviation Threshold)
Ref :
feprx : 40.0
feptx_prec : 15.0
feptx_ofdm : 30.0
feptx_total : 45.0
L1 Tx processing : 205.0
DLSCH encoding : 140.0
L1 Rx processing : 345.0
PUSCH inner-receiver : 150.0
Schedule Response : 3.0
DL & UL scheduling timing : 7.0
UL Indication : 3.0
Slot Indication : 8.0
DeviationThreshold :
feprx : 0.25
feptx_prec : 0.25
feptx_ofdm : 0.25
feptx_total : 0.25
L1 Tx processing : 0.25
DLSCH encoding : 0.25
L1 Rx processing : 0.25
PUSCH inner-receiver : 0.25
Schedule Response : 1.00
DL & UL scheduling timing : 0.50
UL Indication : 1.00
Slot Indication : 0.50
#this is a configuration file
#used to build real time processing statistics
#for 5G NR phy test (gNB terminate)
Title : Processing Time (us) from datalog_rt_stats.2x2.yaml
ColNames :
- Metric
- Average; Max; Count
- Average vs Reference Deviation (Reference Value; Acceptability Deviation Threshold)
Ref :
feprx : 120.0
feptx_prec : 30.0
feptx_ofdm : 50.0
feptx_total : 120.0
L1 Tx processing : 300.0
DLSCH encoding : 230.0
L1 Rx processing : 175.0
PUSCH inner-receiver : 100.0
Schedule Response : 3.0
DL & UL scheduling timing : 11.0
UL Indication : 3.0
Slot Indication : 14.0
DeviationThreshold :
feprx : 0.25
feptx_prec : 0.25
feptx_ofdm : 0.25
feptx_total : 0.25
L1 Tx processing : 0.25
DLSCH encoding : 0.25
L1 Rx processing : 0.25
PUSCH inner-receiver : 0.25
Schedule Response : 1.00
DL & UL scheduling timing : 0.50
UL Indication : 1.00
Slot Indication : 0.25
#this is a configuration file
#used to build real time processing statistics
#for 5G NR phy test (gNB terminate)
Title : Processing Time (us) from datalog_rt_stats.60.2x2.yaml
ColNames :
- Metric
- Average; Max; Count
- Average vs Reference Deviation (Reference Value; Acceptability Deviation Threshold)
Ref :
feprx : 75.0
feptx_prec : 14.0
feptx_ofdm : 30.0
feptx_total : 80.0
L1 Tx processing : 315.0
DLSCH encoding : 155.0
L1 Rx processing : 345.0
PUSCH inner-receiver : 155.0
Schedule Response : 3.0
DL & UL scheduling timing : 13.0
UL Indication : 3.0
Slot Indication : 12.0
DeviationThreshold :
feprx : 0.25
feptx_prec : 0.25
feptx_ofdm : 0.25
feptx_total : 0.25
L1 Tx processing : 0.25
DLSCH encoding : 0.25
L1 Rx processing : 0.25
PUSCH inner-receiver : 0.25
Schedule Response : 1.00
DL & UL scheduling timing : 0.35
UL Indication : 1.00
Slot Indication : 0.35
#this is a configuration file
#used to build real time processing statistics
#for 5G NR phy test (gNB terminate)
Title : Processing Time (us) from datalog_rt_stats.default.yaml
ColNames :
- Metric
- Average; Max; Count
- Average vs Reference Deviation (Reference Value; Acceptability Deviation Threshold)
Ref :
feprx : 40.0
feptx_prec : 13.0
feptx_ofdm : 30.0
feptx_total : 45.0
L1 Tx processing : 160.0
DLSCH encoding : 100.0
L1 Rx processing : 290.0
PUSCH inner-receiver : 115.0
Schedule Response : 3.0
DL & UL scheduling timing : 6.0
UL Indication : 2.0
Slot Indication : 7.0
DeviationThreshold :
feprx : 0.25
feptx_prec : 0.25
feptx_ofdm : 0.25
feptx_total : 0.25
L1 Tx processing : 0.25
DLSCH encoding : 0.25
L1 Rx processing : 0.25
PUSCH inner-receiver : 0.25
Schedule Response : 1.00
DL & UL scheduling timing : 0.50
UL Indication : 1.00
Slot Indication : 0.50
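Each of the YAML files above pairs a reference value in microseconds (Ref) with a relative acceptability threshold (DeviationThreshold); a metric is flagged when its measured average exceeds the reference by more than that fraction. A minimal sketch of that comparison (hypothetical helper; the exact pass/fail rule lives in the CI scripts):

# Hypothetical check: flag metrics whose measured average exceeds the reference
# by more than the configured relative threshold.
def check_rt_stats(measured_avg, ref, threshold):
    failed = {}
    for metric, avg in measured_avg.items():
        deviation = (avg - ref[metric]) / ref[metric]   # relative deviation vs. the reference
        if deviation > threshold[metric]:
            failed[metric] = round(deviation, 3)
    return failed

# Example with two metrics from datalog_rt_stats.default.yaml
ref = {"feprx": 40.0, "L1 Rx processing": 290.0}
thr = {"feprx": 0.25, "L1 Rx processing": 0.25}
print(check_rt_stats({"feprx": 43.1, "L1 Rx processing": 401.5}, ref, thr))
# -> {'L1 Rx processing': 0.384}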
#!/bin/bash
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
function destroy_usage {
echo "OAI CI VM script"
echo " Original Author: Raphael Defosseux"
echo " Requirements:"
echo " -- uvtool uvtool-libvirt apt-cacher"
echo ""
echo "Usage:"
echo "------"
echo " oai-ci-vm-tool destroy [OPTIONS]"
echo ""
echo "Mandatory Options:"
echo "--------"
echo " --job-name #### OR -jn ####"
echo " Specify the name of the Jenkins job."
echo ""
echo " --build-id #### OR -id ####"
echo " Specify the build ID of the Jenkins job."
echo ""
echo "Options:"
echo "--------"
echo " --help OR -h"
echo " Print this help message."
echo ""
}
function destroy_vm {
echo "############################################################"
echo "OAI CI VM script"
echo "############################################################"
echo "VM_TEMPLATE = $VM_TEMPLATE"
LIST_CI_VM=`uvt-kvm list | grep $VM_TEMPLATE`
for CI_VM in $LIST_CI_VM
do
VM_IP_ADDR=`uvt-kvm ip $CI_VM`
echo "VM to destroy: $CI_VM -- IP $VM_IP_ADDR"
uvt-kvm destroy $CI_VM
ssh-keygen -R $VM_IP_ADDR
done
}
@@ -125,12 +125,19 @@ fi
git config user.email "jenkins@openairinterface.org"
git config user.name "OAI Jenkins"
git checkout -f $SOURCE_COMMIT_ID
git log -n1 --pretty=format:\"%s\" > .git/CI_COMMIT_MSG
git checkout -f $SOURCE_COMMIT_ID > checkout.txt 2>&1
STATUS=`grep -E -c "fatal: reference is not a tree" checkout.txt`
rm -f checkout.txt
if [ $STATUS -ne 0 ]
then
echo "fatal: reference is not a tree --> $SOURCE_COMMIT_ID"
STATUS=-1
exit $STATUS
fi
git merge --ff $TARGET_COMMIT_ID -m "Temporary merge for CI"
STATUS=`git status | egrep -c "You have unmerged paths.|fix conflicts"`
STATUS=`git status | grep -E -c "You have unmerged paths.|fix conflicts"`
if [ $STATUS -ne 0 ]
then
echo "There are merge conflicts.. Cannot perform further build tasks"
......
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
#
# Dockerfile for the Open-Air-Interface BUILD service
# Valid for Ubuntu 16.04 (xenial)
#
#---------------------------------------------------------------------
FROM ubuntu:xenial AS oai-cppcheck
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get upgrade --yes && \
DEBIAN_FRONTEND=noninteractive apt-get install --yes \
build-essential \
vim \
cppcheck
WORKDIR /oai-ran
COPY . .
WORKDIR /oai-ran/common/utils/T
RUN make
WORKDIR /oai-ran
RUN mkdir -p cmake_targets/log && \
cppcheck --enable=warning --force --xml --xml-version=2 \
--inline-suppr \
-i openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_decoder.c \
--suppressions-list=ci-scripts/cppcheck_suppressions.list \
-I common/utils \
-I openair3/NAS/COMMON/UTIL \
-j`nproc` . 2> cmake_targets/log/cppcheck.xml 1> cmake_targets/log/cppcheck_build.txt
RUN grep -E -c 'severity="error' cmake_targets/log/cppcheck.xml
RUN grep -E -c 'severity="warning' cmake_targets/log/cppcheck.xml
RUN cat cmake_targets/log/cppcheck.xml
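The grep -E commands above only count lines of the raw report containing severity="error or severity="warning. The same counts can be obtained by actually parsing cmake_targets/log/cppcheck.xml, since cppcheck --xml --xml-version=2 wraps each finding in an <error> element; a short, hypothetical sketch:

# Hypothetical equivalent of the grep-based counting above, parsing the
# cppcheck XML report produced with --xml --xml-version=2.
import xml.etree.ElementTree as ET
from collections import Counter

def count_cppcheck_findings(path="cmake_targets/log/cppcheck.xml"):
    root = ET.parse(path).getroot()
    return Counter(err.get("severity") for err in root.iter("error"))

counts = count_cppcheck_findings()
print(counts.get("error", 0), counts.get("warning", 0))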
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
#
# Dockerfile for the Open-Air-Interface BUILD service
# Valid for Ubuntu 18.04 (bionic)
#
#---------------------------------------------------------------------
FROM ubuntu:bionic AS oai-formatting-check
ARG MERGE_REQUEST
ARG SRC_BRANCH
ARG TARGET_BRANCH
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get upgrade --yes && \
DEBIAN_FRONTEND=noninteractive apt-get install --yes \
gawk \
git
WORKDIR /oai-ran
COPY . .
RUN /bin/bash -c "if [[ -v MERGE_REQUEST ]]; then echo 'Source Branch = $SRC_BRANCH'; echo 'Target Branch = $TARGET_BRANCH'; else echo 'Push to develop'; fi"
RUN /bin/bash -c "if [[ -v MERGE_REQUEST ]]; then ./ci-scripts/checkCodingFormattingRules.sh --src-branch $SRC_BRANCH --target-branch $TARGET_BRANCH; else ./ci-scripts/checkCodingFormattingRules.sh; fi"
RUN echo "=== Files with incorrect define protection ===" && \
/bin/bash -c "if [[ -f header-files-w-incorrect-define.txt ]]; then cat header-files-w-incorrect-define.txt; fi"
RUN echo "=== Files with a GNU GPL licence Banner ===" && \
/bin/bash -c "if [[ -f files-w-gnu-gpl-license-banner.txt ]]; then cat files-w-gnu-gpl-license-banner.txt; fi"
RUN echo "=== Files with a suspect Banner ===" && \
/bin/bash -c "if [[ -f files-w-suspect-banner.txt ]]; then cat files-w-suspect-banner.txt; fi"
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
#
# Dockerfile for the Open-Air-Interface BUILD service
# Valid for Ubuntu 22.04
#
#---------------------------------------------------------------------
FROM ran-base:develop AS ran-tests
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get upgrade --yes && \
DEBIAN_FRONTEND=noninteractive apt-get install --yes \
libgtest-dev \
libyaml-cpp-dev
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
WORKDIR /oai-ran/build
RUN cmake -GNinja -DENABLE_TESTS=ON -DCMAKE_BUILD_TYPE=Debug -DSANITIZE_ADDRESS=True .. && ninja tests
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
# Python for CI of OAI-eNB + COTS-UE
#
# Required Python Version
# Python 3.x
#
# Required Python Package
# pexpect
#---------------------------------------------------------------------
#-----------------------------------------------------------
# Import
#-----------------------------------------------------------
import sys # arg
import re # reg
import logging
import os
import time
import signal
#-----------------------------------------------------------
# OAI Testing modules
#-----------------------------------------------------------
import sshconnection as SSH
import helpreadme as HELP
import constants as CONST
import cls_cluster as OC
import cls_cmd
import cls_module
#-----------------------------------------------------------
# Class Declaration
#-----------------------------------------------------------
class EPCManagement():
def __init__(self):
self.IPAddress = ''
self.UserName = ''
self.Password = ''
self.SourceCodePath = ''
self.Type = ''
self.PcapFileName = ''
self.testCase_id = ''
self.MmeIPAddress = ''
self.containerPrefix = 'prod'
self.mmeConfFile = 'mme.conf'
self.yamlPath = ''
self.isMagmaUsed = False
self.cfgDeploy = '--type start-mini --scenario 1 --capture /tmp/oai-cn5g-v1.5.pcap' # from xml; 'mini' is the default for core-network.py
self.cfgUnDeploy = '--type stop-mini --scenario 1' # from xml; 'mini' is the default for core-network.py
self.OCUrl = "https://api.oai.cs.eurecom.fr:6443"
self.OCRegistry = "default-route-openshift-image-registry.apps.oai.cs.eurecom.fr"
self.OCUserName = ''
self.OCPassword = ''
self.cnID = ''
self.imageToPull = ''
self.eNBSourceCodePath = ''
#-----------------------------------------------------------
# EPC management functions
#-----------------------------------------------------------
def InitializeHSS(self, HTML):
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.SourceCodePath, self.Type)
sys.exit('Insufficient EPC Parameters')
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 Cassandra-based HSS in Docker')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-hss /bin/bash -c "nohup tshark -i eth0 -i eth1 -w /tmp/hss_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
time.sleep(5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-hss /bin/bash -c "nohup ./bin/oai_hss -j ./etc/hss_rel14.json --reloadkey true > hss_check_run.log 2>&1"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 Cassandra-based HSS')
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
logging.debug('\u001B[1m Launching tshark on all interfaces \u001B[0m')
self.PcapFileName = 'epc_' + self.testCase_id + '.pcap'
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f ' + self.PcapFileName, '\$', 5)
mySSH.command('echo $USER; nohup sudo tshark -f "tcp port not 22 and port not 53" -i any -w ' + self.SourceCodePath + '/scripts/' + self.PcapFileName + ' > /tmp/tshark.log 2>&1 &', self.UserName, 5)
mySSH.command('echo ' + self.Password + ' | sudo -S mkdir -p logs', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f hss_' + self.testCase_id + '.log logs/hss*.*', '\$', 5)
mySSH.command('echo "oai_hss -j /usr/local/etc/oai/hss_rel14.json" > ./my-hss.sh', '\$', 5)
mySSH.command('chmod 755 ./my-hss.sh', '\$', 5)
mySSH.command('sudo daemon --unsafe --name=hss_daemon --chdir=' + self.SourceCodePath + '/scripts -o ' + self.SourceCodePath + '/scripts/hss_' + self.testCase_id + '.log ./my-hss.sh', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC HSS')
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('source oaienv', '\$', 5)
mySSH.command('cd scripts', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./run_hss 2>&1 | stdbuf -o0 awk \'{ print strftime("[%Y/%m/%d %H:%M:%S] ",systime()) $0 }\' | stdbuf -o0 tee -a hss_' + self.testCase_id + '.log &', 'Core state: 2 -> 3', 35)
elif re.match('ltebox', self.Type, re.IGNORECASE):
logging.debug('Using the ltebox simulated HSS')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts', '\$', 5)
result = re.search('hss_sim s6as diam_hss', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall hss_sim', '\$', 5)
mySSH.command('ps aux | grep --colour=never xGw | grep -v grep', '\$', 5, silent=True)
result = re.search('root.*xGw', mySSH.getBefore())
if result is not None:
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./stop_ltebox', '\$', 5)
mySSH.command('cd /opt/hss_sim0609', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f hss.log', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S echo "Starting sudo session" && sudo su -c "screen -dm -S simulated_hss ./starthss"', '\$', 5)
else:
logging.error('This option should not occur!')
mySSH.close()
HTML.CreateHtmlTestRow(self.Type, 'OK', CONST.ALL_PROCESSES_OK)
return True
def InitializeMME(self, HTML):
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.SourceCodePath, self.Type)
sys.exit('Insufficient EPC Parameters')
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 MME in Docker')
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-mme /bin/bash -c "nohup tshark -i eth0 -i lo:s10 -f "not port 2152" -w /tmp/mme_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
time.sleep(5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-mme /bin/bash -c "nohup ./bin/oai_mme -c ./etc/' + self.mmeConfFile + ' > mme_check_run.log 2>&1"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 MME')
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f mme_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('echo "./run_mme --config-file /usr/local/etc/oai/mme.conf --set-virt-if" > ./my-mme.sh', '\$', 5)
mySSH.command('chmod 755 ./my-mme.sh', '\$', 5)
mySSH.command('sudo daemon --unsafe --name=mme_daemon --chdir=' + self.SourceCodePath + '/scripts -o ' + self.SourceCodePath + '/scripts/mme_' + self.testCase_id + '.log ./my-mme.sh', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('source oaienv', '\$', 5)
mySSH.command('cd scripts', '\$', 5)
mySSH.command('stdbuf -o0 hostname', '\$', 5)
result = re.search('hostname\\\\r\\\\n(?P<host_name>[a-zA-Z0-9\-\_]+)\\\\r\\\\n', mySSH.getBefore())
if result is None:
logging.debug('\u001B[1;37;41m Hostname Not Found! \u001B[0m')
sys.exit(1)
host_name = result.group('host_name')
mySSH.command('echo ' + self.Password + ' | sudo -S ./run_mme 2>&1 | stdbuf -o0 tee -a mme_' + self.testCase_id + '.log &', 'MME app initialization complete', 100)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
# Clean-up the logs from previous runs
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f ../var/log/*.0', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./start_mme', '\$', 5)
else:
logging.error('This option should not occur!')
mySSH.close()
HTML.CreateHtmlTestRow(self.Type, 'OK', CONST.ALL_PROCESSES_OK)
return True
def SetMmeIPAddress(self):
# Not an error if we don't need an EPC
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
return
if self.IPAddress == 'none':
return
# Only in case of Docker containers, MME IP address is not the EPC HOST IP address
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
self.isMagmaUsed = False
mySSH.command('docker ps -a', '\$', 5)
result = re.search('magma', mySSH.getBefore())
if result is not None:
self.isMagmaUsed = True
if self.isMagmaUsed:
mySSH.command('docker inspect --format="MME_IP_ADDR = {{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}" ' + self.containerPrefix + '-magma-mme', '\$', 5)
else:
mySSH.command('docker inspect --format="MME_IP_ADDR = {{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}" ' + self.containerPrefix + '-oai-mme', '\$', 5)
result = re.search('MME_IP_ADDR = (?P<mme_ip_addr>[0-9\.]+)', mySSH.getBefore())
if result is not None:
self.MmeIPAddress = result.group('mme_ip_addr')
logging.debug('MME IP Address is ' + self.MmeIPAddress)
mySSH.close()
else:
self.MmeIPAddress = self.IPAddress
def InitializeSPGW(self, HTML):
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.SourceCodePath, self.Type)
sys.exit('Insufficient EPC Parameters')
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 SPGW-CUPS in Docker')
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "nohup tshark -i eth0 -i lo:p5c -i lo:s5c -f "not port 2152" -w /tmp/spgwc_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "nohup tshark -i eth0 -f "not port 2152" -w /tmp/spgwu_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
time.sleep(5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "nohup ./bin/oai_spgwc -o -c ./etc/spgw_c.conf > spgwc_check_run.log 2>&1"', '\$', 5)
time.sleep(5)
mySSH.command('docker exec -d ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "nohup ./bin/oai_spgwu -o -c ./etc/spgw_u.conf > spgwu_check_run.log 2>&1"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
logging.debug('Using the OAI EPC Release 14 SPGW-CUPS')
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f spgwc_' + self.testCase_id + '.log spgwu_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('echo "spgwc -c /usr/local/etc/oai/spgw_c.conf" > ./my-spgwc.sh', '\$', 5)
mySSH.command('chmod 755 ./my-spgwc.sh', '\$', 5)
mySSH.command('sudo daemon --unsafe --name=spgwc_daemon --chdir=' + self.SourceCodePath + '/scripts -o ' + self.SourceCodePath + '/scripts/spgwc_' + self.testCase_id + '.log ./my-spgwc.sh', '\$', 5)
time.sleep(5)
mySSH.command('echo "spgwu -c /usr/local/etc/oai/spgw_u.conf" > ./my-spgwu.sh', '\$', 5)
mySSH.command('chmod 755 ./my-spgwu.sh', '\$', 5)
mySSH.command('sudo daemon --unsafe --name=spgwu_daemon --chdir=' + self.SourceCodePath + '/scripts -o ' + self.SourceCodePath + '/scripts/spgwu_' + self.testCase_id + '.log ./my-spgwu.sh', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('source oaienv', '\$', 5)
mySSH.command('cd scripts', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./run_spgw 2>&1 | stdbuf -o0 tee -a spgw_' + self.testCase_id + '.log &', 'Initializing SPGW-APP task interface: DONE', 30)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./start_xGw', '\$', 5)
else:
logging.error('This option should not occur!')
mySSH.close()
HTML.CreateHtmlTestRow(self.Type, 'OK', CONST.ALL_PROCESSES_OK)
return True
def Initialize5GCN(self, HTML):
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.Type)
logging.error('Insufficient EPC Parameters')
return False
mySSH = cls_cmd.getConnection(self.IPAddress)
html_cell = ''
if re.match('ltebox', self.Type, re.IGNORECASE):
logging.debug('Using the SABOX simulated HSS')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('cd /opt/hss_sim0609', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f hss.log', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S echo "Starting sudo session" && sudo su -c "screen -dm -S simulated_5g_hss ./start_5g_hss"', '\$', 5)
logging.debug('Using the sabox')
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./start_sabox', '\$', 5)
html_cell += 'N/A\n'
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
logging.debug('Starting OAI CN5G')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command(f'cd {self.SourceCodePath}/docker-compose', '\$', 5)
mySSH.command('python3 ./core-network.py '+self.cfgDeploy, '\$', 60)
if re.search('start-mini-as-ue', self.cfgDeploy):
dFile = 'docker-compose-mini-nrf-asue.yaml'
elif re.search('basic', self.cfgDeploy):
dFile = 'docker-compose-basic-nrf.yaml'
else:
dFile = 'docker-compose-mini-nrf.yaml'
mySSH.command('docker-compose -f ' + dFile + ' ps -a', '\$', 60)
if mySSH.getBefore().count('Up (healthy)') != 6:
logging.error('Not all container healthy')
else:
logging.debug('OK --> all containers are healthy')
mySSH.command('docker-compose -f ' + dFile + ' config | grep --colour=never image', '\$', 10)
listOfImages = mySSH.getBefore()
for imageLine in listOfImages.split('\\r\\n'):
res1 = re.search('image: (?P<name>[a-zA-Z0-9\-/]+):(?P<tag>[a-zA-Z0-9\-]+)', str(imageLine))
res2 = re.search('mysql', str(imageLine))
if res1 is not None and res2 is None:
html_cell += res1.group('name') + ':' + res1.group('tag') + ' '
nbChars = len(res1.group('name')) + len(res1.group('tag')) + 2
while (nbChars < 32):
html_cell += ' '
nbChars += 1
mySSH.command('docker image inspect --format="Size = {{.Size}} bytes" ' + res1.group('name') + ':' + res1.group('tag'), '\$', 10)
res3 = re.search('Size *= *(?P<size>[0-9\-]*) *bytes', mySSH.getBefore())
if res3 is not None:
imageSize = int(res3.group('size'))
imageSize = int(imageSize/(1024*1024))
html_cell += str(imageSize) + ' MBytes '
mySSH.command('docker image inspect --format="Date = {{.Created}}" ' + res1.group('name') + ':' + res1.group('tag'), '\$', 10)
res4 = re.search('Date *= *(?P<date>[0-9\-]*)T', mySSH.getBefore())
if res4 is not None:
html_cell += '(' + res4.group('date') + ')'
html_cell += '\n'
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
cn = cls_module.Module_UE(self.cnID)
succeeded, report = OC.OC_deploy_CN(mySSH, self.OCUserName, self.OCPassword, cn.getNamespace(), cn.getCNPath())
if not succeeded:
HTML.CreateHtmlTestRow('N/A', 'KO', report)
HTML.CreateHtmlTabFooter(False)
mySSH.close()
logging.error("OC OAI CN5G: CN deployment failed!")
return False
for line in report.stdout.split('\n')[1:]:
columns = line.strip().split()
name = columns[0]
status = columns[2]
html_cell += status + ' ' + name
html_cell += '\n'
else:
logging.error('This option should not occur!')
mySSH.close()
HTML.CreateHtmlTestRowQueue(self.Type, 'OK', [html_cell])
return True
def SetAmfIPAddress(self):
# Not an error if we don't need a 5GCN
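# Resolve the MME/AMF IP address for later use: for 'ltebox' it is the EPC host itself,
# for 'OAICN5G' it is read from the oai-amf docker container's network settings.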
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
return
if self.IPAddress == 'none':
return
if re.match('ltebox', self.Type, re.IGNORECASE):
self.MmeIPAddress = self.IPAddress
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
response=mySSH.command3('docker container ls -f name=oai-amf', 10)
if len(response)>1:
response=mySSH.command3('docker inspect --format=\'{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}\' oai-amf', 10)
tmp = str(response[0],'utf-8')
self.MmeIPAddress = tmp.rstrip()
logging.debug('AMF IP Address ' + self.MmeIPAddress)
else:
logging.error('no container with name oai-amf found, could not retrieve AMF IP address')
mySSH.close()
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
response=mySSH.command3('oc pods ls -f name=oai-amf', 10)
# NOTE: the AMF IP address is not derived from this output yet
mySSH.close()
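# The Check{HSS,MME,SPGW}Process helpers below grep for the expected core-network process
# (per EPC type) and push a *_PROCESS_OK / *_PROCESS_FAILED constant onto status_queue;
# on any SSH exception the parent process is signalled with SIGUSR1.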
def CheckHSSProcess(self, status_queue):
try:
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker top ' + self.containerPrefix + '-oai-hss', '\$', 5)
else:
mySSH.command('stdbuf -o0 ps -aux | grep --color=never hss | grep -v grep', '\$', 5)
if re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
result = re.search('oai_hss -j', mySSH.getBefore())
elif re.match('OAI', self.Type, re.IGNORECASE):
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
elif re.match('ltebox', self.Type, re.IGNORECASE):
result = re.search('hss_sim s6as diam_hss', mySSH.getBefore())
else:
logging.error('This should not happen!')
if result is None:
logging.debug('\u001B[1;37;41m HSS Process Not Found! \u001B[0m')
status_queue.put(CONST.HSS_PROCESS_FAILED)
else:
status_queue.put(CONST.HSS_PROCESS_OK)
mySSH.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def CheckMMEProcess(self, status_queue):
try:
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
self.isMagmaUsed = False
mySSH.command('docker ps -a', '\$', 5)
result = re.search('magma', mySSH.getBefore())
if result is not None:
self.isMagmaUsed = True
if self.isMagmaUsed:
mySSH.command('docker top ' + self.containerPrefix + '-magma-mme', '\$', 5)
else:
mySSH.command('docker top ' + self.containerPrefix + '-oai-mme', '\$', 5)
else:
mySSH.command('stdbuf -o0 ps -aux | grep --color=never mme | grep -v grep', '\$', 5)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
result = re.search('oai_mme -c ', mySSH.getBefore())
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
result = re.search('mme -c', mySSH.getBefore())
elif re.match('OAI', self.Type, re.IGNORECASE):
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
elif re.match('ltebox', self.Type, re.IGNORECASE):
result = re.search('mme|amf', mySSH.getBefore())
else:
logging.error('This should not happen!')
if result is None:
logging.debug('\u001B[1;37;41m MME|AMF Process Not Found! \u001B[0m')
status_queue.put(CONST.MME_PROCESS_FAILED)
else:
status_queue.put(CONST.MME_PROCESS_OK)
mySSH.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def CheckSPGWProcess(self, status_queue):
try:
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker top ' + self.containerPrefix + '-oai-spgwc', '\$', 5)
result = re.search('oai_spgwc -', mySSH.getBefore())
if result is not None:
mySSH.command('docker top ' + self.containerPrefix + '-oai-spgwu-tiny', '\$', 5)
result = re.search('oai_spgwu -', mySSH.getBefore())
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('stdbuf -o0 ps -aux | grep --color=never spgw | grep -v grep', '\$', 5)
result = re.search('spgwu -c ', mySSH.getBefore())
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('stdbuf -o0 ps -aux | grep --color=never spgw | grep -v grep', '\$', 5)
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('stdbuf -o0 ps -aux | grep --color=never xGw | grep -v grep', '\$', 5)
result = re.search('xGw|upf', mySSH.getBefore())
else:
logging.error('This should not happen!')
if result is None:
logging.debug('\u001B[1;37;41m SPGW|UPF Process Not Found! \u001B[0m')
status_queue.put(CONST.SPGW_PROCESS_FAILED)
else:
status_queue.put(CONST.SPGW_PROCESS_OK)
mySSH.close()
except:
os.kill(os.getppid(),signal.SIGUSR1)
def TerminateHSS(self, HTML):
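# The Terminate{HSS,MME,SPGW} methods share the same pattern: send SIGINT for a graceful stop,
# wait briefly, then SIGKILL whatever is still running; the exact commands depend on whether the
# EPC runs in docker containers, natively (OAI / OAI-Rel14-CUPS) or as the ltebox binaries.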
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-hss /bin/bash -c "killall --signal SIGINT oai_hss tshark"', '\$', 5)
time.sleep(2)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-hss /bin/bash -c "ps aux | grep oai_hss"', '\$', 5)
result = re.search('oai_hss -j ', mySSH.getBefore())
if result is not None:
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-hss /bin/bash -c "killall --signal SIGKILL oai_hss"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT oai_hss || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep --colour=never hss | grep -v grep', '\$', 5)
result = re.search('oai_hss -j', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL oai_hss || true', '\$', 5)
mySSH.command('rm -f ' + self.SourceCodePath + '/scripts/my-hss.sh', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT run_hss oai_hss || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep --colour=never hss | grep -v grep', '\$', 5)
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL run_hss oai_hss || true', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('cd scripts', '\$', 5)
time.sleep(1)
mySSH.command('echo ' + self.Password + ' | sudo -S screen -S simulated_hss -X quit', '\$', 5)
time.sleep(5)
mySSH.command('ps aux | grep --colour=never hss_sim | grep -v grep', '\$', 5, silent=True)
result = re.search('hss_sim s6as diam_hss', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall hss_sim', '\$', 5)
else:
logging.error('This should not happen!')
mySSH.close()
HTML.CreateHtmlTestRow('N/A', 'OK', CONST.ALL_PROCESSES_OK)
return True
def TerminateMME(self, HTML):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-mme /bin/bash -c "killall --signal SIGINT oai_mme tshark"', '\$', 5)
time.sleep(2)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-mme /bin/bash -c "ps aux | grep oai_mme"', '\$', 5)
result = re.search('oai_mme -c ', mySSH.getBefore())
if result is not None:
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-mme /bin/bash -c "killall --signal SIGKILL oai_mme"', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT run_mme mme || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep mme | grep -v grep', '\$', 5)
result = re.search('mme -c', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL run_mme mme || true', '\$', 5)
mySSH.command('rm -f ' + self.SourceCodePath + '/scripts/my-mme.sh', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./stop_mme', '\$', 5)
time.sleep(5)
else:
logging.error('This should not happen!')
mySSH.close()
HTML.CreateHtmlTestRow('N/A', 'OK', CONST.ALL_PROCESSES_OK)
return True
def TerminateSPGW(self, HTML):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "killall --signal SIGINT oai_spgwc tshark"', '\$', 5)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "killall --signal SIGINT oai_spgwu tshark"', '\$', 5)
time.sleep(2)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "ps aux | grep oai_spgwc"', '\$', 5)
result = re.search('oai_spgwc -o -c ', mySSH.getBefore())
if result is not None:
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwc /bin/bash -c "killall --signal SIGKILL oai_spgwc"', '\$', 5)
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "ps aux | grep oai_spgwu"', '\$', 5)
result = re.search('oai_spgwu -o -c ', mySSH.getBefore())
if result is not None:
mySSH.command('docker exec -it ' + self.containerPrefix + '-oai-spgwu-tiny /bin/bash -c "killall --signal SIGKILL oai_spgwu"', '\$', 5)
elif re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT spgwc spgwu || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep spgw | grep -v grep', '\$', 5)
result = re.search('spgwc -c |spgwu -c ', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL spgwc spgwu || true', '\$', 5)
mySSH.command('rm -f ' + self.SourceCodePath + '/scripts/my-spgw*.sh', '\$', 5)
mySSH.command('stdbuf -o0 ps -aux | grep tshark | grep -v grep', '\$', 5)
result = re.search('-w ', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT tshark || true', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S chmod 666 ' + self.SourceCodePath + '/scripts/*.pcap', '\$', 5)
elif re.match('OAI', self.Type, re.IGNORECASE):
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGINT run_spgw spgw || true', '\$', 5)
time.sleep(2)
mySSH.command('stdbuf -o0 ps -aux | grep spgw | grep -v grep', '\$', 5)
result = re.search('\/bin\/bash .\/run_', mySSH.getBefore())
if result is not None:
mySSH.command('echo ' + self.Password + ' | sudo -S killall --signal SIGKILL run_spgw spgw || true', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./stop_xGw', '\$', 5)
else:
logging.error('This should not happen!')
mySSH.close()
HTML.CreateHtmlTestRow('N/A', 'OK', CONST.ALL_PROCESSES_OK)
return True
def Terminate5GCN(self, HTML):
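# Tear down the 5G core: stop the sabox / simulated HSS screen sessions for 'ltebox', or collect
# the NF logs, undeploy the docker-compose / OpenShift setup and count "Tracking area update
# request" messages in the recorded capture so the number can be reported in the HTML row.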
mySSH = cls_cmd.getConnection(self.IPAddress)
message = ''
if re.match('ltebox', self.Type, re.IGNORECASE):
logging.debug('Terminating SA BOX')
mySSH.command('cd /opt/ltebox/tools', '\$', 5)
mySSH.command('echo ' + self.Password + ' | sudo -S ./stop_sabox', '\$', 5)
time.sleep(1)
mySSH.command('cd ' + self.SourceCodePath, '\$', 5)
mySSH.command('cd scripts', '\$', 5)
time.sleep(1)
mySSH.command('echo ' + self.Password + ' | sudo -S screen -S simulated_5g_hss -X quit', '\$', 5)
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
logging.debug('OAI CN5G Collecting Log files to workspace')
mySSH.command('echo ' + self.Password + ' | sudo -S rm -rf ' + self.SourceCodePath + '/logs', '\$', 5)
mySSH.command('mkdir ' + self.SourceCodePath + '/logs','\$', 5)
containers_list=['oai-smf','oai-spgwu','oai-amf','oai-nrf']
for c in containers_list:
mySSH.command('docker logs ' + c + ' > ' + self.SourceCodePath + '/logs/' + c + '.log', '\$', 5)
logging.debug('Terminating OAI CN5G')
mySSH.command(f'cd {self.SourceCodePath}/docker-compose', '\$', 5)
mySSH.command('python3 ./core-network.py '+self.cfgUnDeploy, '\$', 60)
mySSH.command('docker volume prune --force || true', '\$', 60)
time.sleep(2)
mySSH.command('tshark -r /tmp/oai-cn5g-v1.5.pcap | grep -E --colour=never "Tracking area update" ','\$', 30)
result = re.search('Tracking area update request', mySSH.getBefore())
if result is not None:
message = 'UE requested ' + str(mySSH.getBefore().count('Tracking area update request')) + ' Tracking area update request(s)'
else:
message = 'No Tracking area update request'
mySSH.run(f'cd {self.SourceCodePath}/logs && zip -r -qq test_logs_CN.zip *.log')
mySSH.copyin(f'{self.SourceCodePath}/logs/test_logs_CN.zip','test_logs_CN.zip')
logging.debug(message)
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
cn = cls_module.Module_UE(self.cnID)
succeeded, report = OC.OC_undeploy_CN(mySSH, self.OCUserName, self.OCPassword, cn.getNamespace(), cn.getCNPath())
if not succeeded:
HTML.CreateHtmlTestRow('N/A', 'KO', report)
HTML.CreateHtmlTabFooter(False)
logging.error("OC OAI CN5G: CN undeployment failed!")
return False
else:
message = report.stdout
else:
logging.error('This should not happen!')
mySSH.close()
HTML.CreateHtmlTestRowQueue(self.Type, 'OK', [message])
return True
def DeployEpc(self, HTML):
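# Deploy a containerized OAI Rel14 EPC with docker-compose: copy the compose file and
# configuration to the EPC host, initialize the Cassandra database, start the network
# functions (optionally with the MAGMA MME) and verify that all containers become healthy.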
logging.debug('Trying to deploy')
if not re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
HTML.CreateHtmlTestRow(self.Type, 'KO', CONST.INVALID_PARAMETER)
HTML.CreateHtmlTabFooter(False)
logging.error('Deploy not possible with this EPC type: ' + self.Type)
return False
if self.IPAddress == '' or self.UserName == '' or self.Password == '' or self.SourceCodePath == '' or self.Type == '':
HELP.GenericHelp(CONST.Version)
HELP.EPCSrvHelp(self.IPAddress, self.UserName, self.Password, self.SourceCodePath, self.Type)
logging.error('Insufficient EPC Parameters')
return False
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
mySSH.command('docker-compose --version', '\$', 5)
result = re.search('docker-compose version 1|Docker Compose version v2', mySSH.getBefore())
if result is None:
mySSH.close()
HTML.CreateHtmlTestRow(self.Type, 'KO', CONST.INVALID_PARAMETER)
HTML.CreateHtmlTabFooter(False)
logging.error('docker-compose not installed on ' + self.IPAddress)
return False
# Checking if it is a MAGMA deployment
self.isMagmaUsed = False
if os.path.isfile('./' + self.yamlPath + '/redis_extern.conf'):
self.isMagmaUsed = True
logging.debug('MAGMA MME is used!')
mySSH.command('if [ -d ' + self.SourceCodePath + '/scripts ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/scripts ; fi', '\$', 5)
mySSH.command('if [ -d ' + self.SourceCodePath + '/logs ]; then echo ' + self.Password + ' | sudo -S rm -Rf ' + self.SourceCodePath + '/logs ; fi', '\$', 5)
mySSH.command('mkdir -p ' + self.SourceCodePath + '/scripts ' + self.SourceCodePath + '/logs', '\$', 5)
mySSH.command('rm -f ' + self.SourceCodePath + '/*.log', '\$', 5)
# deploying and configuring the cassandra database
# container names and services are currently hard-coded.
# they could be recovered by:
# - docker-compose config --services
# - docker-compose config | grep container_name
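# A possible (untested) sketch to avoid the hard-coded names, reusing the same SSH helpers:
#   mySSH.command('docker-compose config --services', '\$', 5)
#   services = [s.strip() for s in mySSH.getBefore().split('\\r\\n') if s.strip()]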
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/docker-compose.yml', self.SourceCodePath + '/scripts')
if self.isMagmaUsed:
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/entrypoint.sh', self.SourceCodePath + '/scripts')
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/mme.conf', self.SourceCodePath + '/scripts')
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/mme_fd.sprint.conf', self.SourceCodePath + '/scripts')
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/redis_extern.conf', self.SourceCodePath + '/scripts')
mySSH.command('chmod a+x ' + self.SourceCodePath + '/scripts/entrypoint.sh', '\$', 5)
else:
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/entrypoint.sh', self.SourceCodePath + '/scripts')
mySSH.copyout(self.IPAddress, self.UserName, self.Password, './' + self.yamlPath + '/mme.conf', self.SourceCodePath + '/scripts')
mySSH.command('chmod 775 entrypoint.sh', '\$', 60)
mySSH.command('wget --quiet --tries=3 --retry-connrefused https://raw.githubusercontent.com/OPENAIRINTERFACE/openair-hss/develop/src/hss_rel14/db/oai_db.cql', '\$', 30)
mySSH.command('docker-compose down -v', '\$', 60)
mySSH.command('docker-compose up -d db_init', '\$', 60)
# databases take time...
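# poll the prod-db-init container logs (up to 10 times, 5 s apart) for the 'OK' marker
# that indicates the HSS database was provisioned successfully.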
time.sleep(10)
cnt = 0
db_init_status = False
while (cnt < 10):
mySSH.command('docker logs prod-db-init', '\$', 5)
result = re.search('OK', mySSH.getBefore())
if result is not None:
cnt = 10
db_init_status = True
else:
time.sleep(5)
cnt += 1
mySSH.command('docker rm -f prod-db-init', '\$', 5)
if not db_init_status:
mySSH.close()
HTML.CreateHtmlTestRow(self.Type, 'KO', CONST.INVALID_PARAMETER)
HTML.CreateHtmlTabFooter(False)
logging.error('Cassandra DB deployment/configuration went wrong!')
return False
# deploying EPC cNFs
mySSH.command('docker-compose up -d oai_spgwu', '\$', 60)
if self.isMagmaUsed:
listOfContainers = 'prod-cassandra prod-oai-hss prod-magma-mme prod-oai-spgwc prod-oai-spgwu-tiny prod-redis'
expectedHealthyContainers = 6
else:
listOfContainers = 'prod-cassandra prod-oai-hss prod-oai-mme prod-oai-spgwc prod-oai-spgwu-tiny'
expectedHealthyContainers = 5
# Checking for additional services
mySSH.command('docker-compose config', '\$', 5)
configResponse = mySSH.getBefore()
if configResponse.count('trf_gen') == 1:
mySSH.command('docker-compose up -d trf_gen', '\$', 60)
listOfContainers += ' prod-trf-gen'
expectedHealthyContainers += 1
mySSH.command('docker-compose config | grep --colour=never image', '\$', 10)
html_cell = ''
listOfImages = mySSH.getBefore()
for imageLine in listOfImages.split('\\r\\n'):
res1 = re.search('image: (?P<name>[a-zA-Z0-9\-]+):(?P<tag>[a-zA-Z0-9\-]+)', str(imageLine))
res2 = re.search('cassandra|redis', str(imageLine))
if res1 is not None and res2 is None:
html_cell += res1.group('name') + ':' + res1.group('tag') + ' '
nbChars = len(res1.group('name')) + len(res1.group('tag')) + 2
while (nbChars < 32):
html_cell += ' '
nbChars += 1
mySSH.command('docker image inspect --format="Size = {{.Size}} bytes" ' + res1.group('name') + ':' + res1.group('tag'), '\$', 10)
res3 = re.search('Size *= *(?P<size>[0-9\-]*) *bytes', mySSH.getBefore())
if res3 is not None:
imageSize = int(res3.group('size'))
imageSize = int(imageSize/(1024*1024))
html_cell += str(imageSize) + ' MBytes '
mySSH.command('docker image inspect --format="Date = {{.Created}}" ' + res1.group('name') + ':' + res1.group('tag'), '\$', 10)
res4 = re.search('Date *= *(?P<date>[0-9\-]*)T', mySSH.getBefore())
if res4 is not None:
html_cell += '(' + res4.group('date') + ')'
html_cell += '\n'
# Checking if all are healthy
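# note: 'unhealthy' also contains the substring 'healthy', hence the subtraction below
# when counting how many containers really are healthy.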
cnt = 0
while (cnt < 3):
mySSH.command('docker inspect --format=\'{{.State.Health.Status}}\' ' + listOfContainers, '\$', 10)
unhealthyNb = mySSH.getBefore().count('unhealthy')
healthyNb = mySSH.getBefore().count('healthy') - unhealthyNb
startingNb = mySSH.getBefore().count('starting')
if healthyNb == expectedHealthyContainers:
cnt = 10
else:
time.sleep(10)
cnt += 1
logging.debug(' -- ' + str(healthyNb) + ' healthy container(s)')
logging.debug(' -- ' + str(unhealthyNb) + ' unhealthy container(s)')
logging.debug(' -- ' + str(startingNb) + ' still starting container(s)')
if healthyNb == expectedHealthyContainers:
mySSH.command('docker exec -d prod-oai-hss /bin/bash -c "nohup tshark -i any -f \'port 9042 or port 3868\' -w /tmp/hss_check_run.pcap 2>&1 > /dev/null"', '\$', 5)
if self.isMagmaUsed:
mySSH.command('docker exec -d prod-magma-mme /bin/bash -c "nohup tshark -i any -f \'port 3868 or port 2123 or port 36412\' -w /tmp/mme_check_run.pcap 2>&1 > /dev/null"', '\$', 10)
else:
mySSH.command('docker exec -d prod-oai-mme /bin/bash -c "nohup tshark -i any -f \'port 3868 or port 2123 or port 36412\' -w /tmp/mme_check_run.pcap 2>&1 > /dev/null"', '\$', 10)
mySSH.command('docker exec -d prod-oai-spgwc /bin/bash -c "nohup tshark -i any -f \'port 2123 or port 8805\' -w /tmp/spgwc_check_run.pcap 2>&1 > /dev/null"', '\$', 10)
# on SPGW-U, not capturing on SGI to avoid huge file
mySSH.command('docker exec -d prod-oai-spgwu-tiny /bin/bash -c "nohup tshark -i any -f \'port 8805\' -w /tmp/spgwu_check_run.pcap 2>&1 > /dev/null"', '\$', 10)
mySSH.close()
logging.debug('Deployment OK')
HTML.CreateHtmlTestRowQueue(self.Type, 'OK', [html_cell])
return True
else:
mySSH.close()
logging.debug('Deployment went wrong')
HTML.CreateHtmlTestRowQueue(self.Type, 'KO', [html_cell])
return False
def UndeployEpc(self, HTML):
logging.debug('Trying to undeploy')
# No sanity check here; we assume everything was deployed beforehand.
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
# Checking if it is a MAGMA deployment.
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('docker-compose ps -a', '\$', 5)
self.isMagmaUsed = False
result = re.search('magma', mySSH.getBefore())
if result is not None:
self.isMagmaUsed = True
logging.debug('MAGMA MME is used!')
# Recovering logs and pcap files
mySSH.command('cd ' + self.SourceCodePath + '/logs', '\$', 5)
mySSH.command('docker exec -it prod-oai-hss /bin/bash -c "killall --signal SIGINT oai_hss tshark"', '\$', 5)
if self.isMagmaUsed:
mySSH.command('docker exec -it prod-magma-mme /bin/bash -c "killall --signal SIGINT tshark"', '\$', 5)
else:
mySSH.command('docker exec -it prod-oai-mme /bin/bash -c "killall --signal SIGINT tshark"', '\$', 5)
mySSH.command('docker exec -it prod-oai-spgwc /bin/bash -c "killall --signal SIGINT oai_spgwc tshark"', '\$', 5)
mySSH.command('docker exec -it prod-oai-spgwu-tiny /bin/bash -c "killall --signal SIGINT tshark"', '\$', 5)
mySSH.command('docker logs prod-oai-hss > hss_' + self.testCase_id + '.log', '\$', 5)
if self.isMagmaUsed:
mySSH.command('docker cp --follow-link prod-magma-mme:/var/log/mme.log mme_' + self.testCase_id + '.log', '\$', 15)
else:
mySSH.command('docker logs prod-oai-mme > mme_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('docker logs prod-oai-spgwc > spgwc_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('docker logs prod-oai-spgwu-tiny > spgwu_' + self.testCase_id + '.log', '\$', 5)
mySSH.command('docker cp prod-oai-hss:/tmp/hss_check_run.pcap hss_' + self.testCase_id + '.pcap', '\$', 60)
if self.isMagmaUsed:
mySSH.command('docker cp prod-magma-mme:/tmp/mme_check_run.pcap mme_' + self.testCase_id + '.pcap', '\$', 60)
else:
mySSH.command('docker cp prod-oai-mme:/tmp/mme_check_run.pcap mme_' + self.testCase_id + '.pcap', '\$', 60)
mySSH.command('tshark -r mme_' + self.testCase_id + '.pcap | grep -E --colour=never "Tracking area update"', '\$', 60)
result = re.search('Tracking area update request', mySSH.getBefore())
if result is not None:
message = 'UE requested ' + str(mySSH.getBefore().count('Tracking area update request')) + ' Tracking area update request(s)'
else:
message = 'No Tracking area update request'
logging.debug(message)
mySSH.command('docker cp prod-oai-spgwc:/tmp/spgwc_check_run.pcap spgwc_' + self.testCase_id + '.pcap', '\$', 60)
mySSH.command('docker cp prod-oai-spgwu-tiny:/tmp/spgwu_check_run.pcap spgwu_' + self.testCase_id + '.pcap', '\$', 60)
# Remove all
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
if self.isMagmaUsed:
listOfContainers = 'prod-cassandra prod-oai-hss prod-magma-mme prod-oai-spgwc prod-oai-spgwu-tiny prod-redis'
nbContainers = 6
else:
listOfContainers = 'prod-cassandra prod-oai-hss prod-oai-mme prod-oai-spgwc prod-oai-spgwu-tiny'
nbContainers = 5
# Checking for additional services
mySSH.command('docker-compose config', '\$', 5)
configResponse = mySSH.getBefore()
if configResponse.count('trf_gen') == 1:
listOfContainers += ' prod-trf-gen'
nbContainers += 1
mySSH.command('docker-compose down -v', '\$', 60)
mySSH.command('docker inspect --format=\'{{.State.Health.Status}}\' ' + listOfContainers, '\$', 10)
noMoreContainerNb = mySSH.getBefore().count('No such object')
mySSH.command('docker inspect --format=\'{{.Name}}\' prod-oai-public-net prod-oai-private-net', '\$', 10)
noMoreNetworkNb = mySSH.getBefore().count('No such object')
mySSH.close()
if noMoreContainerNb == nbContainers and noMoreNetworkNb == 2:
logging.debug('Undeployment OK')
HTML.CreateHtmlTestRowQueue(self.Type, 'OK', [message])
return True
else:
logging.debug('Undeployment went wrong')
HTML.CreateHtmlTestRowQueue(self.Type, 'KO', [message])
return False
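# The LogCollect{HSS,MME,SPGW} helpers archive the corresponding core-network logs and captures
# into hss.log.zip / mme.log.zip / spgw.log.zip under <SourceCodePath>/scripts so the CI
# framework can retrieve them afterwards; the exact sources depend on the EPC/CN type.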
def LogCollectHSS(self):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('rm -f hss.log.zip', '\$', 5)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker inspect prod-oai-hss', '\$', 10)
result = re.search('No such object', mySSH.getBefore())
if result is not None:
mySSH.command('cd ../logs', '\$', 5)
mySSH.command('rm -f hss.log.zip', '\$', 5)
mySSH.command('zip hss.log.zip hss_*.*', '\$', 60)
mySSH.command('mv hss.log.zip ../scripts', '\$', 60)
else:
mySSH.command('docker cp ' + self.containerPrefix + '-oai-hss:/openair-hss/hss_check_run.log .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-hss:/tmp/hss_check_run.pcap .', '\$', 60)
mySSH.command('zip hss.log.zip hss_check_run.*', '\$', 60)
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
logging.debug('LogCollect is bypassed for that variant')
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
logging.debug('LogCollect is bypassed for that variant')
elif re.match('OAI', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('zip hss.log.zip hss*.log', '\$', 60)
mySSH.command('echo ' + self.Password + ' | sudo -S rm hss*.log', '\$', 5)
if re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('zip hss.log.zip logs/hss*.* *.pcap', '\$', 60)
mySSH.command('echo ' + self.Password + ' | sudo -S rm -f logs/hss*.* *.pcap', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cp /opt/hss_sim0609/hss.log .', '\$', 60)
mySSH.command('zip hss.log.zip hss.log', '\$', 60)
else:
logging.error('This option should not occur!')
mySSH.close()
def LogCollectMME(self):
if self.Type != 'OC-OAI-CN5G':
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('rm -f mme.log.zip', '\$', 5)
else:
mySSH = cls_cmd.getConnection(self.IPAddress)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker inspect prod-oai-mme', '\$', 10)
result = re.search('No such object', mySSH.getBefore())
if result is not None:
mySSH.command('cd ../logs', '\$', 5)
mySSH.command('rm -f mme.log.zip', '\$', 5)
mySSH.command('zip mme.log.zip mme_*.*', '\$', 60)
mySSH.command('mv mme.log.zip ../scripts', '\$', 60)
else:
mySSH.command('docker cp ' + self.containerPrefix + '-oai-mme:/openair-mme/mme_check_run.log .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-mme:/tmp/mme_check_run.pcap .', '\$', 60)
mySSH.command('zip mme.log.zip mme_check_run.*', '\$', 60)
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath + '/logs','\$', 5)
mySSH.command('cp -f /tmp/oai-cn5g-v1.5.pcap .','\$', 30)
mySSH.command('zip mme.log.zip oai-amf.log oai-nrf.log oai-cn5g*.pcap','\$', 30)
mySSH.command('mv mme.log.zip ' + self.SourceCodePath + '/scripts','\$', 30)
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
mySSH.run('cd ' + self.SourceCodePath + '/logs')
mySSH.run('zip mme.log.zip oai-amf.log oai-nrf.log oai-cn5g*.pcap')
mySSH.run('mv mme.log.zip ' + self.SourceCodePath + '/scripts')
elif re.match('OAI', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('zip mme.log.zip mme*.log', '\$', 60)
mySSH.command('echo ' + self.Password + ' | sudo -S rm mme*.log', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cp /opt/ltebox/var/log/*Log.0 .', '\$', 5)
mySSH.command('zip mme.log.zip mmeLog.0 s1apcLog.0 s1apsLog.0 s11cLog.0 libLog.0 s1apCodecLog.0 amfLog.0 ngapcLog.0 ngapcommonLog.0 ngapsLog.0', '\$', 60)
else:
logging.error('This option should not occur!')
mySSH.close()
def LogCollectSPGW(self):
mySSH = SSH.SSHConnection()
mySSH.open(self.IPAddress, self.UserName, self.Password)
mySSH.command('cd ' + self.SourceCodePath + '/scripts', '\$', 5)
mySSH.command('rm -f spgw.log.zip', '\$', 5)
if re.match('OAI-Rel14-Docker', self.Type, re.IGNORECASE):
mySSH.command('docker inspect prod-oai-mme', '\$', 10)
result = re.search('No such object', mySSH.getBefore())
if result is not None:
mySSH.command('cd ../logs', '\$', 5)
mySSH.command('rm -f spgw.log.zip', '\$', 5)
mySSH.command('zip spgw.log.zip spgw*.*', '\$', 60)
mySSH.command('mv spgw.log.zip ../scripts', '\$', 60)
else:
mySSH.command('docker cp ' + self.containerPrefix + '-oai-spgwc:/openair-spgwc/spgwc_check_run.log .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-spgwu-tiny:/openair-spgwu-tiny/spgwu_check_run.log .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-spgwc:/tmp/spgwc_check_run.pcap .', '\$', 60)
mySSH.command('docker cp ' + self.containerPrefix + '-oai-spgwu-tiny:/tmp/spgwu_check_run.pcap .', '\$', 60)
mySSH.command('zip spgw.log.zip spgw*_check_run.*', '\$', 60)
elif re.match('OAICN5G', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath + '/logs','\$', 5)
mySSH.command('zip spgw.log.zip oai-smf.log oai-spgwu.log','\$', 30)
mySSH.command('mv spgw.log.zip ' + self.SourceCodePath + '/scripts','\$', 30)
elif re.match('OC-OAI-CN5G', self.Type, re.IGNORECASE):
mySSH.command('cd ' + self.SourceCodePath + '/logs','\$', 5)
mySSH.command('zip spgw.log.zip oai-smf.log oai-spgwu.log','\$', 30)
mySSH.command('mv spgw.log.zip ' + self.SourceCodePath + '/scripts','\$', 30)
elif re.match('OAI', self.Type, re.IGNORECASE) or re.match('OAI-Rel14-CUPS', self.Type, re.IGNORECASE):
mySSH.command('zip spgw.log.zip spgw*.log', '\$', 60)
mySSH.command('echo ' + self.Password + ' | sudo -S rm spgw*.log', '\$', 5)
elif re.match('ltebox', self.Type, re.IGNORECASE):
mySSH.command('cp /opt/ltebox/var/log/*Log.0 .', '\$', 5)
mySSH.command('zip spgw.log.zip xGwLog.0 upfLog.0', '\$', 60)
else:
logging.error('This option should not occur!')
mySSH.close()
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
# Python for CI of OAI-eNB + COTS-UE
#
# Required Python Version
# Python 3.x
#
# Required Python Package
# pexpect
#---------------------------------------------------------------------
#-----------------------------------------------------------
# Functions Declaration
#-----------------------------------------------------------
def GenericHelp(vers):
print('----------------------------------------------------------------------------------------------------------------------')
print('main.py Ver: ' + vers)
print('----------------------------------------------------------------------------------------------------------------------')
print('python main.py [options]')
print(' --help Show this help.')
print(' --mode=[Mode]')
print(' TesteNB')
print(' InitiateHtml, FinalizeHtml')
print(' TerminateeNB, TerminateHSS, TerminateMME, TerminateSPGW')
print(' LogCollectBuild, LogCollecteNB, LogCollectHSS, LogCollectMME, LogCollectSPGW, LogCollectPing, LogCollectIperf')
print(' --local Force local execution: rewrites the test xml script before running to always execute on localhost. Assumes')
print(' images are available locally, will not remove any images and will run inside the current repo directory')
def GitSrvHelp(repository,branch,commit,mergeallow,targetbranch):
print(' --ranRepository=[OAI RAN Repository URL] -- ' + repository)
print(' --ranBranch=[OAI RAN Repository Branch] -- ' + branch)
print(' --ranCommitID=[OAI RAN Repository Commit SHA-1] -- ' + commit)
print(' --ranAllowMerge=[Allow Merge Request (with target branch) (true or false)] -- ' + mergeallow)
print(' --ranTargetBranch=[Target Branch in case of a Merge Request] -- ' + targetbranch)
def eNBSrvHelp(ipaddr, username, password, sourcepath):
print(' --eNBIPAddress=[eNB\'s IP Address] -- ' + ipaddr)
print(' --eNBUserName=[eNB\'s Login User Name] -- ' + username)
print(' --eNBPassword=[eNB\'s Login Password] -- ' + password)
print(' --eNBSourceCodePath=[eNB\'s Source Code Path] -- ' + sourcepath)
def OAIUESrvHelp(ipaddr, username, password, sourcepath):
print(' --UEIPAddress=[UE\'s IP Address] -- ' + ipaddr)
print(' --UEUserName=[UE\'s Login User Name] -- ' + username)
print(' --UEPassword=[UE\'s Login Password] -- ' + password)
print(' --UESourceCodePath=[UE\'s Source Code Path] -- ' + sourcepath)
def EPCSrvHelp(ipaddr, username, password, sourcepath, epctype):
print(' --EPCIPAddress=[EPC\'s IP Address] -- ' + ipaddr)
print(' --EPCUserName=[EPC\'s Login User Name] -- ' + username)
print(' --EPCPassword=[EPC\'s Login Password] -- ' + password)
print(' --EPCSourceCodePath=[EPC\'s Source Code Path] -- ' + sourcepath)
print(' --EPCType=[EPC\'s Type: OAI or ltebox or OC-OAI-CN5G] -- ' + epctype)
def XmlHelp(filename):
print(' --XMLTestFile=[XML Test File to be run] -- ' + filename)
print(' Note: multiple xml files can be specified (--XMLTestFile=File1 ... --XMLTestFile=FileN) when HTML headers are created ("InitiateHtml" mode)')
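# Illustrative invocation only (option values are placeholders, not taken from a real test bench):
#   python main.py --mode=TesteNB --EPCIPAddress=192.168.1.10 --EPCUserName=oaici \
#                  --EPCPassword=***** --EPCType=ltebox --XMLTestFile=mytest.xml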