Commit 9fa556ff authored by Gabriel

Merge branch 'Enhancement-142-OAI_UE_autotest_framework' into develop_integration_w50

parents ca078ff5 af68b397
...@@ -75,7 +75,7 @@ class openair(core):
         return (stdout, stderr)
 
     def connect(self, username, password, prompt='PEXPECT_OAI'):
-        max_retries=100
+        max_retries=10
         i=0
         while i <= max_retries:
             self.prompt1 = prompt
...@@ -97,9 +97,9 @@ class openair(core):
                 # need to look for twice the string of the prompt
                 self.oai.prompt()
                 self.oai.prompt()
-                self.oai.sendline('uptime')
-                self.oai.prompt()
-                print self.oai.before
+#                self.oai.sendline('uptime')
+#                self.oai.prompt()
+#                print self.oai.before
                 break
             except Exception, e:
                 error=''
...@@ -171,7 +171,7 @@ class openair(core):
             sys.exit(1)
 
     def disconnect(self):
-        print 'disconnecting the ssh connection to ' + self.address + '\n'
+#        print 'disconnecting the ssh connection to ' + self.address + '\n'
         self.oai.send('exit')
         # self.cancel()
...
diff --git a/cmake_targets/CMakeLists.txt b/cmake_targets/CMakeLists.txt
index 07d92a1..b6a02d1 100644
--- a/cmake_targets/CMakeLists.txt
+++ b/cmake_targets/CMakeLists.txt
@@ -235,6 +235,7 @@ add_boolean_option(DEBUG_OMG False "???")
add_boolean_option(XFORMS False "This adds the possibility to see the signal oscilloscope")
add_boolean_option(PRINT_STATS False "This adds the possibility to see the status")
add_boolean_option(T_TRACER False "Activate the T tracer, a debugging/monitoring framework" )
+add_boolean_option(UE_AUTOTEST_TRACE False "Activate UE autotest specific logs")
add_boolean_option(DEBUG_CONSOLE False "makes debugging easier, disables stdout/stderr buffering")
diff --git a/openair1/PHY/LTE_TRANSPORT/initial_sync.c b/openair1/PHY/LTE_TRANSPORT/initial_sync.c
index 663978e..b77cb1b 100644
--- a/openair1/PHY/LTE_TRANSPORT/initial_sync.c
+++ b/openair1/PHY/LTE_TRANSPORT/initial_sync.c
@@ -476,6 +476,15 @@ int initial_sync(PHY_VARS_UE *ue, runmode_t mode)
//#endif
if (ue->UE_scan_carrier == 0) {
+
+ #if UE_AUTOTEST_TRACE
+ LOG_I(PHY,"[UE %d] AUTOTEST Cell Sync : frame = %d, rx_offset %d, freq_offset %d \n",
+ ue->Mod_id,
+ ue->proc.proc_rxtx[0].frame_rx,
+ ue->rx_offset,
+ ue->common_vars.freq_offset );
+ #endif
+
if (ue->mac_enabled==1) {
LOG_I(PHY,"[UE%d] Sending synch status to higher layers\n",ue->Mod_id);
//mac_resynch();
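The guarded LOG_I above prints a single line of the form "[UE <id>] AUTOTEST Cell Sync : frame = <n>, rx_offset <n>, freq_offset <n>" once initial synchronization succeeds, and the autotest framework scans the UE execution log for it (the test script further down calls analyser.check_cell_synchro). A minimal sketch of such a checker; the analyser module is not part of this diff, so everything here except the log-line format and the 'CELL_SYNCH' verdict string is an assumption:

import re

# Hedged sketch of a cell-synchronization check over a UE log file. The line
# format mirrors the LOG_I added in initial_sync.c; names are illustrative.
SYNC_RE = re.compile(r'AUTOTEST Cell Sync : frame = (\d+), rx_offset (-?\d+), freq_offset (-?\d+)')

def check_cell_synchro(fname):
    with open(fname) as log:
        for line in log:
            match = SYNC_RE.search(line)
            if match:
                frame, rx_offset, freq_offset = (int(g) for g in match.groups())
                return 'CELL_SYNCH'      # verdict string compared in the test script
    return 'CELL_NOT_SYNCH'              # assumed name for the failure verdict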
diff --git a/openair1/SCHED/phy_procedures_lte_ue.c b/openair1/SCHED/phy_procedures_lte_ue.c
index 5dc629b..353a049 100644
--- a/openair1/SCHED/phy_procedures_lte_ue.c
+++ b/openair1/SCHED/phy_procedures_lte_ue.c
@@ -3551,6 +3551,13 @@ int phy_procedures_UE_RX(PHY_VARS_UE *ue,UE_rxtx_proc_t *proc,uint8_t eNB_id,uin
LOG_D(PHY,"[UE %d] Calculating bitrate Frame %d: total_TBS = %d, total_TBS_last = %d, bitrate %f kbits\n",
ue->Mod_id,frame_rx,ue->total_TBS[eNB_id],
ue->total_TBS_last[eNB_id],(float) ue->bitrate[eNB_id]/1000.0);
+
+ #if UE_AUTOTEST_TRACE
+ if ((frame_rx % 100 == 0)) {
+ LOG_I(PHY,"[UE %d] AUTOTEST Metric : UE_DLSCH_BITRATE = %5.2f kbps (frame = %d) \n", ue->Mod_id, (float) ue->bitrate[eNB_id]/1000.0, frame_rx);
+ }
+ #endif
+
}
diff --git a/openair1/SCHED/phy_procedures_lte_ue.c b/openair1/SCHED/phy_procedures_lte_ue.c
index 5dc629b..4d31ad3 100644
--- a/openair1/SCHED/phy_procedures_lte_ue.c
+++ b/openair1/SCHED/phy_procedures_lte_ue.c
@@ -3347,6 +3347,22 @@ int phy_procedures_UE_RX(PHY_VARS_UE *ue,UE_rxtx_proc_t *proc,uint8_t eNB_id,uin
// first slot has been processed (FFTs + Channel Estimation, PCFICH/PHICH/PDCCH)
+ #if UE_AUTOTEST_TRACE
+ if ( (frame_rx % 10 == 0) && (subframe_rx == 0)) {
+ printf("AUTOTEST Metric : UE_FREQ_OFFSET = %d Hz (frame = %d) \n", ue->common_vars.freq_offset, frame_rx);
+ printf("AUTOTEST Metric : UE_RX_OFFSET = %d (frame = %d) \n", ue->rx_offset, frame_rx);
+
+ printf("AUTOTEST Metric : RRC Measurments RSRP[0] %.2f dBm/RE, RSSI %.2f dBm, RSRQ[0] %.2f dB, N0 %d dBm/RE, NF %.1f dB (frame = %d)\n",
+ 10*log10(ue->measurements.rsrp[0])-ue->rx_total_gain_dB,
+ 10*log10(ue->measurements.rssi)-ue->rx_total_gain_dB,
+ 10*log10(ue->measurements.rsrq[0]),
+ ue->measurements.n0_power_tot_dBm,
+ (double)ue->measurements.n0_power_tot_dBm+132.24,
+ frame_rx);
+ }
+ #endif
+
+
// do procedures for C-RNTI
if (ue->dlsch[eNB_id][0]->active == 1) {
VCD_SIGNAL_DUMPER_DUMP_FUNCTION_BY_NAME(VCD_SIGNAL_DUMPER_FUNCTIONS_PDSCH_PROC, VCD_FUNCTION_IN);
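One derivation worth spelling out: the NF value printed above is n0_power_tot_dBm + 132.24. Thermal noise at 290 K is about -174 dBm/Hz, and one LTE resource element occupies a 15 kHz subcarrier, so the thermal floor per RE is -174 + 10*log10(15000) ≈ -132.24 dBm; the noise figure is the measured noise density minus that floor. A quick numeric check in Python:

import math

# Thermal noise floor per LTE resource element (15 kHz subcarrier, 290 K).
floor_dbm_per_re = -174.0 + 10 * math.log10(15000)
print(round(floor_dbm_per_re, 2))           # -132.24

# Consistent with the sample log line quoted in the test script:
# "N0 -125 dBm/RE, NF 7.2 dB"  ->  NF = -125 - (-132.24) = 7.24 dB
print(round(-125 - floor_dbm_per_re, 2))    # 7.24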
diff --git a/cmake_targets/build_oai b/cmake_targets/build_oai
index 25e9a1c..0b04bb7 100755
--- a/cmake_targets/build_oai
+++ b/cmake_targets/build_oai
@@ -54,6 +54,7 @@ BUILD_DOXYGEN=0
T_TRACER="False"
DISABLE_HARDWARE_DEPENDENCY="False"
CMAKE_BUILD_TYPE=""
+UE_AUTOTEST_TRACE="False"
trap handle_ctrl_c INT
function print_help() {
@@ -130,6 +131,8 @@ Options
Enables the T tracer.
--disable-hardware-dependency
Disable HW dependency during installation
+--ue-autotest-trace
+ Enable specific traces for UE autotest framework
Usage (first build):
oaisim (eNB + UE): ./build_oai -I --oaisim -x --install-system-files
Eurecom EXMIMO + COTS UE : ./build_oai -I --eNB -x --install-system-files
@@ -285,6 +288,10 @@ function main() {
echo_info "Disabling hardware dependency for compiling software"
DISABLE_HARDWARE_DEPENDENCY="True"
shift 1;;
+ --ue-autotest-trace)
+ UE_AUTOTEST_TRACE="True"
+ echo_info "Enabling autotest specific trace for UE"
+ shift 1;;
-h | --help)
print_help
exit 1;;
@@ -457,6 +464,7 @@ function main() {
echo "set (DEADLINE_SCHEDULER \"${DEADLINE_SCHEDULER_FLAG_USER}\" )" >>$cmake_file
echo "set (CPU_AFFINITY \"${CPU_AFFINITY_FLAG_USER}\" )" >>$cmake_file
echo "set ( T_TRACER $T_TRACER )" >> $cmake_file
+ echo "set (UE_AUTOTEST_TRACE $UE_AUTOTEST_TRACE)" >> $cmake_file
echo 'include(${CMAKE_CURRENT_SOURCE_DIR}/../CMakeLists.txt)' >> $cmake_file
cd $DIR/$lte_build_dir/build
cmake ..
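Taken together, the build changes wire the option end to end: --ue-autotest-trace sets UE_AUTOTEST_TRACE="True", build_oai writes set (UE_AUTOTEST_TRACE True) into the generated CMake file, and the add_boolean_option entry above exposes it to the compiler so the #if UE_AUTOTEST_TRACE blocks are compiled in. A hedged smoke test of that plumbing; the build target and the generated-file path are assumptions based on the oaisim example in the help text, not part of this commit:

import subprocess

# Build with the new flag, then confirm it landed in the generated CMake file.
subprocess.check_call(['./build_oai', '-I', '--oaisim', '--ue-autotest-trace'])
generated = 'oaisim_build_oai/CMakeLists.txt'    # assumed generated-file location
assert 'set (UE_AUTOTEST_TRACE True)' in open(generated).read()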
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
<title>OAI5G UE Autotest Report</title>
<script type="text/javascript">
function showhide(id) {
var e = document.getElementById(id);
e.style.display = (e.style.display == 'block') ? 'none' : 'block';
}
</script>
</head>
<style>
table, th, td {
border: 1px solid black;
border-collapse: collapse;
padding: 5px;
}
</style>
<body>
<center>
<h2>OAI5G UE Autotest Report</h2>
</center>
<p>
<table border>
<caption>Test session configuration</caption>
<tr><td>Start time</td><td>{{test_session_start_time}}</td></tr>
<tr><td>Stop time</td><td>{{test_session_stop_time}}</td></tr>
<tr><td>Duration</td><td>{{test_session_duration}}</td></tr>
<tr><td>MTC host</td><td>{{mtc_host}}</td></tr>
<tr><td>User</td><td>{{user}}</td></tr>
<tr><td>Password</td><td>{{password}}</td></tr>
</table>
</p>
<h3>Test Setup</h3>
To be completed.
<br></br>
<h3>UE phy-test performance test results</h3>
<h4>Objectives</h4>
<p>Checks that the OAI UE can achieve at least 75 percent of the theoretical throughput.</p>
<p>Tests are run for all MCS values (0 to 28) at 5 MHz and 10 MHz bandwidth.</p>
<h4>Results</h4>
<table>
<TR><TH>ID</TH><TH>TAG</TH><TH>VERDICT</TH><TH>NB RUNS</TH><TH>PASS</TH><TH>FAILED</TH><TH>INCON</TH><TH>SKIPPED</TH><TH>SEG FAULT</TH><TH>TC Timeout</TH><TH>Start</TH><TH>Stop</TH><TH>Duration</TH><TH>Details</TH></TR>
{% for result in test_results|sort(attribute='testcase_name') %}
<TR>
<TD >{{result.testcase_name}}</TD>
<TD align="right">{{result.tags}}</TD>
{% if result.testcase_verdict == "PASS" %}
<TD align="center" style="background-color:green">{{result.testcase_verdict}}</TD>
{% elif result.testcase_verdict == "FAIL" %}
<TD align="center" style="background-color:red">{{result.testcase_verdict}}</TD>
{% else %}
<TD align="center" style="background-color:orange">{{result.testcase_verdict}}</TD>
{% endif %}
<TD align='center'>{{result.nruns}}</TD>
<TD align='center'>{{result.nb_run_pass}}</TD>
<TD align='center'>{{result.nb_run_failed}}</TD>
<TD align='center'>{{result.nb_run_inc}}</TD>
<TD align='center'>{{result.nb_run_skip}}</TD>
<TD align='center'>{{result.nb_seg_fault}}</TD>
<TD >{{result.testcase_timeout}}</TD>
<TD >{{result.testcase_time_start.strftime('%Y-%m-%d %H:%M:%S')}}</TD>
<TD >{{result.testcase_time_stop.strftime('%Y-%m-%d %H:%M:%S')}}</TD>
<TD >{{result.testcase_duration}}</TD>
<TD ><a href="{{ result.testcase_name }}/{{ result.testcase_name }}_report.html">{{ result.testcase_name }}_report.html</a></TD>
</TR>
{% endfor %}
</table>
<br></br>
<h3>UE phy-test stability test results</h3>
<h4>Objectives</h4>
<p>To be completed.</p>
<h4>Results</h4>
To be completed.
</body>
</html>
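The session report above is a Jinja2 template: each results row is driven by one test_results entry whose keys mirror the placeholders ({{result.testcase_name}}, {{result.nb_run_pass}}, and so on, with testcase_time_start/stop as datetime objects because the template calls strftime on them). A minimal rendering sketch; the template file name, loader path, and sample values are assumptions, only the field names come from the template:

from datetime import datetime
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader('templates'))      # assumed layout
template = env.get_template('ue_autotest_report.html')       # assumed file name

result = {
    'testcase_name': '032800', 'tags': 'phy-test', 'testcase_verdict': 'PASS',
    'nruns': 2, 'nb_run_pass': 2, 'nb_run_failed': 0, 'nb_run_inc': 0,
    'nb_run_skip': 0, 'nb_seg_fault': 0, 'testcase_timeout': 300,
    'testcase_time_start': datetime(2016, 12, 12, 10, 0, 0),
    'testcase_time_stop': datetime(2016, 12, 12, 10, 5, 0),
    'testcase_duration': '0:05:00',
}

html = template.render(
    test_session_start_time='2016-12-12 10:00:00',
    test_session_stop_time='2016-12-12 11:00:00',
    test_session_duration='1:00:00',
    mtc_host='mtc-host', user='oai', password='********',
    test_results=[result])

with open('report.html', 'w') as out:
    out.write(html)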
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
<title>OAI5G UE test case report</title>
<script type="text/javascript">
function showhide(id) {
var e = document.getElementById(id);
e.style.display = (e.style.display == 'block') ? 'none' : 'block';
}
</script>
</head>
<style>
table, th, td {
border: 1px solid black;
border-collapse: collapse;
padding: 5px;
}
</style>
<body>
<center>
<h2>OAI5G UE test case report details</h2>
</center>
<h3>Test Case description</h3>
<p>
<table border>
<tr><td>ID</td><td>{{testcase_name}}</td></tr>
<tr><td>TAG</td><td>{{tags}}</td></tr>
<tr><td>class</td><td>{{testcaseclass}}</td></tr>
<tr><td>description</td><td></td></tr>
<tr><td>timeout</td><td>{{testcase_timeout}}</td></tr>
<tr><td>number of runs</td><td>{{nruns}}</td></tr>
<tr><td>eNB machine</td><td>{{testcase_eNBMachine}}</td></tr>
<tr><td>UE machine</td><td>{{testcase_UEMachine}}</td></tr>
</table>
</p>
<h3>Test Case execution</h3>
<p>
<table border>
<tr><td>testcase_time_start</td><td>{{testcase_time_start}}</td></tr>
<tr><td>testcase_time_stop</td><td>{{testcase_time_stop}}</td></tr>
<tr><td>testcase_duration</td><td>{{testcase_duration}}</td></tr>
<tr><td>Nb runs</td><td>{{nruns}}</td></tr>
<tr><td>Nb PASS</td><td>{{nb_run_pass}}</td></tr>
<tr><td>Nb FAILED</td><td>{{nb_run_failed}}</td></tr>
<tr><td>Nb INCONCLUSIVE</td><td>{{nb_run_inc}}</td></tr>
<tr>
<td>testcase_verdict</td>
{% if testcase_verdict == "PASS" %}
<TD align="center" style="background-color:green">{{testcase_verdict}}</TD>
{% elif testcase_verdict == "FAIL" %}
<TD align="center" style="background-color:red">{{testcase_verdict}}</TD>
{% else %}
<TD align="center" style="background-color:orange">{{testcase_verdict}}</TD>
{% endif %}
</tr>
<tr><td>Nb Seg Fault</td><td>{{nb_seg_fault}}</td></tr>
</table>
</p>
<h3>Test Case runs results</h3>
{% for run_results in runs_results|sort(attribute='run_id') %}
<h4>RUN {{run_results.run_id}} </h4>
<table border>
<tr><td>run_start_time </td><td>{{run_results.run_start_time}}</td></tr>
<tr><td>run_stop_time</td><td>{{run_results.run_stop_time}}</td></tr>
<tr><td>run_duration</td><td>{{run_results.run_duration}}</td></tr>
<tr>
<td>run_verdict</td>
{% if run_results.run_verdict == "PASS" %}
<TD align="center" style="background-color:green">{{run_results.run_verdict}}</TD>
{% elif run_results.run_verdict == "FAIL" %}
<TD align="center" style="background-color:red">{{run_results.run_verdict}}</TD>
{% else %}
<TD align="center" style="background-color:orange">{{run_results.run_verdict}}</TD>
{% endif %}
</tr>
<tr><td>Seg Fault Status</td>
{% if run_results.ue_seg_fault_status == "NO_SEG_FAULT" %}
<TD align="center" style="background-color:green">{{run_results.ue_seg_fault_status}}</TD>
{% elif run_results.ue_seg_fault_status == "SEG_FAULT" %}
<TD align="center" style="background-color:red">{{run_results.ue_seg_fault_status}}</TD>
{% else %}
<TD align="center" style="background-color:orange">unknown</TD>
{% endif %}
</tr>
</table>
{% for run_metrics in run_results.runs_metrics %}
<br></br>
<table border>
<tr><td>metric_id</td><td>{{run_metrics.metric_id}}</td></tr>
<tr><td>Description</td><td>{{run_metrics.metric_desc}}</td></tr>
<tr><td>Unit of measure</td><td>{{run_metrics.metric_uom}}</td></tr>
<tr><td>metric_min</td><td>{{run_metrics.metric_min}}</td></tr>
<tr><td>metric_max</td><td>{{run_metrics.metric_max}}</td></tr>
<tr><td>metric_mean</td><td>{{run_metrics.metric_mean}}</td></tr>
<tr><td>metric_median</td><td>{{run_metrics.metric_median}}</td></tr>
<tr><td colspan="2"></td></tr>
{% if run_metrics.pass_fail_stat is defined %}
<tr><td>Pass/fail stat</td><td>{{run_metrics.pass_fail_stat}}</td></tr>
{% endif %}
{% if run_metrics.pass_fail_min_limit is defined %}
<tr><td>Pass/fail min limit</td><td>{{run_metrics.pass_fail_min_limit}}</td></tr>
{% endif %}
{% if run_metrics.pass_fail_max_limit is defined %}
<tr><td>Pass/fail max limit</td><td>{{run_metrics.pass_fail_max_limit}}</td></tr>
{% endif %}
<tr><td colspan="2"></td></tr>
<tr><td>metric_fig</td><td><IMG src="{{run_metrics.metric_fig}}"></td></tr>
</table>
{% endfor %}
{% if run_results.run_traffic.traffic_count != 0 %}
<br></br>
<table border>
<TR><TH>Iperf metric</TH><TH>min</TH><TH>max</TH><TH>mean</TH><TH>median</TH></TR>
<TR><td>Bandwidth</td><td>{{run_results.run_traffic.bw_min}}</td><td>{{run_results.run_traffic.bw_max}}</td><td>{{run_results.run_traffic.bw_mean}}</td><td>{{run_results.run_traffic.bw_median}}</td></TR>
<TR><td>Jitter</td><td>{{run_results.run_traffic.jitter_min}}</td><td>{{run_results.run_traffic.jitter_max}}</td><td>{{run_results.run_traffic.jitter_mean}}</td><td>{{run_results.run_traffic.jitter_median}}</td></TR>
<TR><td>Loss rate</td><td>{{run_results.run_traffic.rl_min}}</td><td>{{run_results.run_traffic.rl_max}}</td><td>{{run_results.run_traffic.rl_mean}}</td><td>{{run_results.run_traffic.rl_median}}</td></TR>
<TR><td colspan="5"></td></TR>
<TR><td>Iperf duration</td><td>{{run_results.run_traffic.iperf_duration}}</td><td></td><td>Pass/Fail criteria (min duration)</td><td>{{run_results.run_traffic.dur_pass_fail_crit}}</td></TR>
<TR><td colspan="5"></td></TR>
<tr><td>traffic_fig</td><td colspan="4"><IMG src="{{run_results.run_traffic.traffic_fig}}"></td></tr>
</table>
{% endif %}
{% endfor %}
</body>
</html>
...@@ -35,12 +35,15 @@ import getopt
 import sys
 from subprocess import call
 
+import encoder
+
+sys.path.append(os.path.expandvars('$OPENAIR_DIR/cmake_targets/autotests/tools/'))
 
 #test_cases = ('030001', '030901', '031001', '031601', '031701', '031801', '031901', '032001', '032101', '032201', '032301', '032501', '032601', '032801')
-test_cases = ('030030' , '030030' )
+test_cases = ('032800' , '032730' )
 
-nb_run = 3
+nb_run = 2
 
 def error_opt(msg):
     print("Option error: " + msg)
...@@ -58,14 +61,27 @@ def main(args):
 
+#    metric = {}
+#    metric['id'] = 'UE_DLSCH_BITRATE'
+#    metric['description'] = 'UE downlink physical throughput'
+#    metric['regex'] = '(UE_DLSCH_BITRATE) =\s+(\d+\.\d+) kbps.+frame = (\d+)\)'
+#    metric['unit_of_meas'] = 'kbps'
+#    metric['min_limit'] = 14668.8
+
+#AUTOTEST Metric : RRC Measurments RSRP[0]=-97.60 dBm/RE, RSSI=-72.83 dBm, RSRQ[0] 9.03 dB, N0 -125 dBm/RE, NF 7.2 dB (frame = 4490)
     metric = {}
-    metric['id'] = 'UE_DLSCH_BITRATE'
-    metric['description'] = 'UE downlink physical throughput'
-    metric['regex'] = '(UE_DLSCH_BITRATE) =\s+(\d+\.\d+) kbps.+frame = (\d+)\)'
+    metric['id'] = 'UE_DL_RRC_MEAS'
+    metric['description'] = 'UE downlink RRC Measurments'
+    metric['nb_metric'] = 5
+#    metric['regex'] = 'AUTOTEST Metric : RRC Measurments (RSRP\[0\])=(-?\d+\.?\d*)\s+(.+),\s+(RSRQ\[0\])=(-?\d+\.?\d*)\s+(.+),,\s+(N0)=(-?\d+\.?\d*)\s+(.+),,\s+(NF)=(-?\d+\.?\d*)\s+(.+)\s+\(frame = (\d+)\) '
+    metric['regex'] = 'AUTOTEST Metric : RRC Measurments (RSRP\[0\])=(-?\d+\.?\d*)\s+(.+)\,\s+(RSSI)=(-?\d+\.?\d*)\s+(.+)\,\s+(RSRQ\[0\])=(-?\d+\.?\d*)\s+(.+)\,\s+(N0)=(-?\d+\.?\d*)\s+(.+)\,\s+(NF)=(-?\d+\.?\d*)\s+(.+)\s+\(frame = (\d+)\)'
     metric['unit_of_meas'] = 'kbps'
     metric['min_limit'] = 14668.8
 
 #report_path = log_path+'/report/'
 #os.system(' mkdir -p ' + report_path)
...@@ -74,58 +90,44 @@ def main(args):
     #return(0)
 
-    for test_case in test_cases:
-        # print test_case
-        if test_case == '030001':
-            metric['min_limit'] = 500.0
-        if test_case == '030901':
-            metric['min_limit'] = 640.0
-        if test_case == '031001':
-            metric['min_limit'] = 3200.0
-        if test_case == '031601':
-            metric['min_limit'] = 5920.0
-        if test_case == '031701':
-            metric['min_limit'] = 6000.0
-        if test_case == '031801':
-            metric['min_limit'] = 6200.0
-        if test_case == '031901':
-            metric['min_limit'] = 7000.0
-        if test_case == '032001':
-            metric['min_limit'] = 7800.0
-        if test_case == '032101':
-            metric['min_limit'] = 8000.0
-        if test_case == '032201':
-            metric['min_limit'] = 9000.0
-        if test_case == '032301':
-            metric['min_limit'] = 10000.0
-        if test_case == '032501':
-            metric['min_limit'] = 11000.0
-        if test_case == '032601':
-            metric['min_limit'] = 12000.0
-        if test_case == '032801':
-            metric['min_limit'] = 12500.0
-        if test_case == '035201':
-            metric['min_limit'] = 14668.8
-        if test_case == '036001':
-            metric['min_limit'] = 25363.2
+    test_results = []
 
+    for test_case in test_cases:
         for i in range(0, nb_run):
-            fname = 'log//'+test_case+'/run_'+str(i)+'/UE_exec_'+str(i)+'_.log'
+            fname = '..//log//'+test_case+'/run_'+str(i)+'/UE_exec_'+str(i)+'_.log'
             args = {'metric' : metric,
                     'file' : fname }
-            cell_synch_status = analyser.check_cell_synchro(fname)
-            if cell_synch_status == 'CELL_SYNCH':
-                print '!!!!!!!!!!!!!! Cell synchronized !!!!!!!!!!!'
-                metric_checks_flag = 0
-            else :
-                print '!!!!!!!!!!!!!! Cell NOT NOT synchronized !!!!!!!!!!!'
-#            metric_extracted = analyser.do_extract_metrics(args)
+#            cell_synch_status = analyser.check_cell_synchro(fname)
+#            if cell_synch_status == 'CELL_SYNCH':
+#                print '!!!!!!!!!!!!!! Cell synchronized !!!!!!!!!!!'
+#                metric_checks_flag = 0
+#            else :
+#                print '!!!!!!!!!!!!!! Cell NOT NOT synchronized !!!!!!!!!!!'
+#            metrics_extracted = analyser.do_extract_metrics_new(args)
 
+            # de-xmlfy test report
+            xml_file = '..//log//'+test_case+'/test.'+test_case+'_ng.xml'
+            print xml_file
+
+#            test_result =
+#            test_results.append(test_result)
+
+#        xmlFile = logdir_local_testcase + '/test.' + testcasename + '.xml'
+#        xml="\n<testcase classname=\'"+ testcaseclass + "\' name=\'" + testcasename + "."+tags + "\' Run_result=\'" + test_result_string + "\' time=\'" + str(duration) + " s \' RESULT=\'" + testcase_verdict + "\'></testcase> \n"
+#        write_file(xmlFile, xml, mode="w")
+
+#        xmlFile_ng = logdir_local_testcase + '/test.' + testcasename + '_ng.xml'
+#        xml_ng = xmlify(test_result, wrap=testcasename, indent="    ")
+#        write_file(xmlFile_ng, xml_ng, mode="w")
 
 #            print "min       = "+ str( metric_extracted['metric_min'] )
 #            print "min_index = "+ str( metric_extracted['metric_min_index'] )
...@@ -143,16 +145,27 @@ def main(args):
 #            print fname
 #            analyser.do_img_metrics(metric, metric_extracted, fname)
 
 #            fname = 'log//'+test_case+'/run_'+str(i)+'/UE_traffic_'+str(i)+'_.log'
 #            args = {'file' : fname }
 #            traffic_metrics = analyser.do_extract_traffic_metrics(args)
 #            fname= 'report/iperf_'+test_case+'_'+str(i)+'.png'
 #            print fname
 #            analyser.do_img_traffic(traffic_metrics, fname)
 
+    for test_result in test_results:
+        cmd = 'mkdir -p ' + report_dir + '/'+ test_result['testcase_name']
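For reference, the UE_DLSCH_BITRATE metric kept above as a comment pairs with the LOG_I added to phy_procedures_lte_ue.c earlier in this commit. A self-contained check that its regex extracts the throughput and frame number from a log line in that format; the sample line is illustrative but follows the format string exactly:

import re

# Regex as defined for the UE_DLSCH_BITRATE metric in the script above.
BITRATE_RE = r'(UE_DLSCH_BITRATE) =\s+(\d+\.\d+) kbps.+frame = (\d+)\)'

# Sample line matching the "[UE %d] AUTOTEST Metric : UE_DLSCH_BITRATE = %5.2f kbps (frame = %d)" format.
line = '[UE 0] AUTOTEST Metric : UE_DLSCH_BITRATE = 14668.80 kbps (frame = 500) '

match = re.search(BITRATE_RE, line)
assert match is not None
metric_id, value_kbps, frame = match.group(1), float(match.group(2)), int(match.group(3))
print(metric_id, value_kbps, frame)    # UE_DLSCH_BITRATE 14668.8 500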