Commit 044f6657 authored by hardy

Merge remote-tracking branch 'origin/ci-deploy-asterix' into integration_2021_wk04_b

parents 5db5657d 747695e4
@@ -37,6 +37,7 @@ import logging
 import os
 import time
 from multiprocessing import Process, Lock, SimpleQueue
+from zipfile import ZipFile
 #-----------------------------------------------------------
 # OAI Testing modules
@@ -80,7 +81,11 @@ class Containerize():
 		self.flexranCtrlDeployed = False
 		self.flexranCtrlIpAddress = ''
+		self.cli = ''
+		self.dockerfileprefix = ''
+		self.host = ''
+		self.allImagesSize = {}
+		self.collectInfo = {}
 	#-----------------------------------------------------------
 	# Container management functions
 	#-----------------------------------------------------------
@@ -111,9 +116,20 @@ class Containerize():
 		mySSH = SSH.SSHConnection()
 		mySSH.open(lIpAddr, lUserName, lPassWord)
+		# Checking the hostname to get adapted on cli and dockerfileprefixes
+		mySSH.command('hostnamectl', '\$', 5)
+		result = re.search('Ubuntu|Red Hat', mySSH.getBefore())
+		self.host = result.group(0)
+		if self.host == 'Ubuntu':
+			self.cli = 'docker'
+			self.dockerfileprefix = '.ubuntu18'
+		elif self.host == 'Red Hat':
+			self.cli = 'podman'
+			self.dockerfileprefix = '.rhel8.2'
 		imageNames = []
 		result = re.search('eNB', self.imageKind)
-		# Creating a tupple with the imageName and the DockerFile prefix pattern
+		# Creating a tupple with the imageName and the DockerFile prefix pattern on obelix
 		if result is not None:
 			imageNames.append(('oai-enb', 'eNB'))
 		else:
@@ -129,10 +145,13 @@ class Containerize():
 			imageNames.append(('oai-nr-ue', 'nrUE'))
 		if len(imageNames) == 0:
 			imageNames.append(('oai-enb', 'eNB'))
 		# Workaround for some servers, we need to erase completely the workspace
 		if self.forcedWorkspaceCleanup:
 			mySSH.command('echo ' + lPassWord + ' | sudo -S rm -Rf ' + lSourcePath, '\$', 15)
 		self.testCase_id = HTML.testCase_id
 		# on RedHat/CentOS .git extension is mandatory
 		result = re.search('([a-zA-Z0-9\:\-\.\/])+\.git', self.ranRepository)
 		if result is not None:
@@ -162,97 +181,156 @@ class Containerize():
 			else:
 				logging.debug('Merging with the target branch: ' + self.ranTargetBranch)
 				mySSH.command('git merge --ff origin/' + self.ranTargetBranch + ' -m "Temporary merge for CI"', '\$', 5)
+		# if asterix, copy the entitlement and subscription manager configurations
+		if self.host == 'Red Hat':
+			mySSH.command('mkdir -p tmp/ca/', '\$', 5)
+			mySSH.command('mkdir -p tmp/entitlement/', '\$', 5)
+			mySSH.command('sudo cp /etc/rhsm/ca/redhat-uep.pem tmp/ca/', '\$', 5)
+			mySSH.command('sudo cp /etc/pki/entitlement/*.pem tmp/entitlement/', '\$', 5)
+		#mySSH.close()
+		#return 0
+		sharedimage = 'ran-build'
 		# Let's remove any previous run artifacts if still there
-		mySSH.command('docker image prune --force', '\$', 5)
+		mySSH.command(self.cli + ' image prune --force', '\$', 5)
-		mySSH.command('docker image rm ran-build:' + imageTag, '\$', 5)
+		mySSH.command(self.cli + ' image rm ' + sharedimage + ':' + imageTag, '\$', 5)
 		for image,pattern in imageNames:
-			mySSH.command('docker image rm ' + image + ':' + imageTag, '\$', 5)
+			mySSH.command(self.cli + ' image rm ' + image + ':' + imageTag, '\$', 5)
 		# Build the shared image
-		mySSH.command('docker build --target ran-build --tag ran-build:' + imageTag + ' --file docker/Dockerfile.ran.ubuntu18 --build-arg NEEDED_GIT_PROXY="http://proxy.eurecom.fr:8080" . > cmake_targets/log/ran-build.log 2>&1', '\$', 800)
+		mySSH.command(self.cli + ' build --target ' + sharedimage + ' --tag ' + sharedimage + ':' + imageTag + ' --file docker/Dockerfile.ran' + self.dockerfileprefix + ' --build-arg NEEDED_GIT_PROXY="http://proxy.eurecom.fr:8080" . > cmake_targets/log/ran-build.log 2>&1', '\$', 1600)
 		# Build the target image(s)
-		previousImage='ran-build:' + imageTag
+		previousImage = sharedimage + ':' + imageTag
 		danglingShaOnes=[]
 		for image,pattern in imageNames:
 			# the archived Dockerfiles have "ran-build:latest" as base image
 			# we need to update them with proper tag
-			mySSH.command('sed -i -e "s#ran-build:latest#ran-build:' + imageTag + '#" docker/Dockerfile.' + pattern + '.ubuntu18', '\$', 5)
+			mySSH.command('sed -i -e "s#' + sharedimage + ':latest#' + sharedimage + ':' + imageTag + '#" docker/Dockerfile.' + pattern + self.dockerfileprefix, '\$', 5)
-			mySSH.command('docker build --target ' + image + ' --tag ' + image + ':' + imageTag + ' --file docker/Dockerfile.' + pattern + '.ubuntu18 . > cmake_targets/log/' + image + '.log 2>&1', '\$', 600)
+			mySSH.command(self.cli + ' build --target ' + image + ' --tag ' + image + ':' + imageTag + ' --file docker/Dockerfile.' + pattern + self.dockerfileprefix + ' . > cmake_targets/log/' + image + '.log 2>&1', '\$', 1200)
 			# Retrieving the dangling image(s) for the log collection
-			mySSH.command('docker images --filter "dangling=true" --filter "since=' + previousImage + '" -q | sed -e "s#^#sha=#"', '\$', 5)
+			mySSH.command(self.cli + ' images --filter "dangling=true" --filter "since=' + previousImage + '" -q | sed -e "s#^#sha=#"', '\$', 5)
 			result = re.search('sha=(?P<imageShaOne>[a-zA-Z0-9\-\_]+)', mySSH.getBefore())
 			if result is not None:
 				danglingShaOnes.append((image, result.group('imageShaOne')))
-			previousImage=image + ':' + imageTag
+			previousImage = image + ':' + imageTag
 		imageTag = 'ci-temp'
 		# First verify if images were properly created.
 		status = True
-		mySSH.command('docker image inspect --format=\'Size = {{.Size}} bytes\' ran-build:' + imageTag, '\$', 5)
+		mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + sharedimage + ':' + imageTag, '\$', 5)
 		if mySSH.getBefore().count('No such object') != 0:
 			logging.error('Could not build properly ran-build')
 			status = False
 		else:
-			result = re.search('Size = (?P<size>[0-9\-]+) bytes', mySSH.getBefore())
+			result = re.search('Size *= *(?P<size>[0-9\-]+) *bytes', mySSH.getBefore())
 			if result is not None:
 				imageSize = float(result.group('size'))
 				imageSize = imageSize / 1000
 				if imageSize < 1000:
 					logging.debug('\u001B[1m ran-build size is ' + ('%.0f' % imageSize) + ' kbytes\u001B[0m')
+					self.allImagesSize['ran-build'] = str(round(imageSize,1)) + ' kbytes'
 				else:
 					imageSize = imageSize / 1000
 					if imageSize < 1000:
 						logging.debug('\u001B[1m ran-build size is ' + ('%.0f' % imageSize) + ' Mbytes\u001B[0m')
+						self.allImagesSize['ran-build'] = str(round(imageSize,1)) + ' Mbytes'
 					else:
 						imageSize = imageSize / 1000
 						logging.debug('\u001B[1m ran-build size is ' + ('%.3f' % imageSize) + ' Gbytes\u001B[0m')
+						self.allImagesSize['ran-build'] = str(round(imageSize,1)) + ' Gbytes'
 			else:
 				logging.debug('ran-build size is unknown')
 		for image,pattern in imageNames:
-			mySSH.command('docker image inspect --format=\'Size = {{.Size}} bytes\' ' + image + ':' + imageTag, '\$', 5)
+			mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + image + ':' + imageTag, '\$', 5)
 			if mySSH.getBefore().count('No such object') != 0:
 				logging.error('Could not build properly ' + image)
 				status = False
 			else:
-				result = re.search('Size = (?P<size>[0-9\-]+) bytes', mySSH.getBefore())
+				result = re.search('Size *= *(?P<size>[0-9\-]+) *bytes', mySSH.getBefore())
 				if result is not None:
 					imageSize = float(result.group('size'))
 					imageSize = imageSize / 1000
 					if imageSize < 1000:
 						logging.debug('\u001B[1m ' + image + ' size is ' + ('%.0f' % imageSize) + ' kbytes\u001B[0m')
+						self.allImagesSize[image] = str(round(imageSize,1)) + ' kbytes'
 					else:
 						imageSize = imageSize / 1000
 						if imageSize < 1000:
 							logging.debug('\u001B[1m ' + image + ' size is ' + ('%.0f' % imageSize) + ' Mbytes\u001B[0m')
+							self.allImagesSize[image] = str(round(imageSize,1)) + ' Mbytes'
 						else:
 							imageSize = imageSize / 1000
 							logging.debug('\u001B[1m ' + image + ' size is ' + ('%.3f' % imageSize) + ' Gbytes\u001B[0m')
+							self.allImagesSize[image] = str(round(imageSize,1)) + ' Gbytes'
 				else:
 					logging.debug('ran-build size is unknown')
 		if not status:
 			mySSH.close()
 			logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
 			HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
+			#HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
 			HTML.CreateHtmlTabFooter(False)
 			sys.exit(1)
 		# Recover build logs, for the moment only possible when build is successful
-		mySSH.command('docker create --name test ran-build:' + imageTag, '\$', 5)
+		mySSH.command(self.cli + ' create --name test ' + sharedimage + ':' + imageTag, '\$', 5)
 		mySSH.command('mkdir -p cmake_targets/log/ran-build', '\$', 5)
-		mySSH.command('docker cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/ran-build', '\$', 5)
+		mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/ran-build', '\$', 5)
-		mySSH.command('docker rm -f test', '\$', 5)
+		mySSH.command(self.cli + ' rm -f test', '\$', 5)
 		for image,shaone in danglingShaOnes:
 			mySSH.command('mkdir -p cmake_targets/log/' + image, '\$', 5)
-			mySSH.command('docker create --name test ' + shaone, '\$', 5)
+			mySSH.command(self.cli + ' create --name test ' + shaone, '\$', 5)
-			mySSH.command('docker cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/' + image, '\$', 5)
+			mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/' + image, '\$', 5)
-			mySSH.command('docker rm -f test', '\$', 5)
+			mySSH.command(self.cli + ' rm -f test', '\$', 5)
-		mySSH.command('docker image prune --force', '\$', 5)
+		mySSH.command(self.cli + ' image prune --force', '\$', 5)
 		mySSH.command('cd cmake_targets', '\$', 5)
 		mySSH.command('mkdir -p build_log_' + self.testCase_id, '\$', 5)
 		mySSH.command('mv log/* ' + 'build_log_' + self.testCase_id, '\$', 5)
+		#mySSH.close()
+		mySSH.command('cd /tmp/CI-eNB/cmake_targets', '\$', 5)
+		if (os.path.isfile('./build_log_' + self.testCase_id + '.zip')):
+			os.remove('./build_log_' + self.testCase_id + '.zip')
+		mySSH.command('zip -r -qq build_log_' + self.testCase_id + '.zip build_log_' + self.testCase_id, '\$', 5)
+		mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/build_log_' + self.testCase_id + '.zip', '.')
+		#mySSH.command('rm -f build_log_' + self.testCase_id + '.zip','\$', 5)
 		mySSH.close()
+		ZipFile('build_log_' + self.testCase_id + '.zip').extractall('.')
+		#Trying to identify the errors and warnings for each built images
+		imageNames1 = imageNames
+		shared = ('ran-build','ran')
+		imageNames1.insert(0, shared)
+		for image,pattern in imageNames1:
+			files = {}
+			file_list = [f for f in os.listdir('build_log_' + self.testCase_id + '/' + image) if os.path.isfile(os.path.join('build_log_' + self.testCase_id + '/' + image, f)) and f.endswith('.txt')]
+			for fil in file_list:
+				errorandwarnings = {}
+				warningsNo = 0
+				errorsNo = 0
+				with open('build_log_{}/{}/{}'.format(self.testCase_id,image,fil), mode='r') as inputfile:
+					for line in inputfile:
+						result = re.search(' ERROR ', str(line))
+						if result is not None:
+							errorsNo += 1
+						result = re.search(' error:', str(line))
+						if result is not None:
+							errorsNo += 1
+						result = re.search(' WARNING ', str(line))
+						if result is not None:
+							warningsNo += 1
+						result = re.search(' warning:', str(line))
+						if result is not None:
+							warningsNo += 1
+				errorandwarnings['errors'] = errorsNo
+				errorandwarnings['warnings'] = warningsNo
+				errorandwarnings['status'] = status
+				files[fil] = errorandwarnings
+			self.collectInfo[image] = files
 		logging.info('\u001B[1m Building OAI Image(s) Pass\u001B[0m')
 		HTML.CreateHtmlTestRow(self.imageKind, 'OK', CONST.ALL_PROCESSES_OK)
+		HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
 	def DeployObject(self, HTML, EPC):
 		if self.eNB_serverId[self.eNB_instance] == '0':
...
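For reference, a minimal standalone Python sketch of the host-detection logic this commit adds to the Containerize class above: the build host's distribution is read from hostnamectl and mapped to a container CLI (docker on Ubuntu, podman on Red Hat) plus the matching Dockerfile suffix. The detect_container_cli helper and the local subprocess call below are illustrative stand-ins, not part of the repository, which runs the same command on the build server through its SSH wrapper.

import re
import subprocess

def detect_container_cli(hostnamectl_output):
	# Map the distribution reported by `hostnamectl` to (cli, dockerfile_suffix),
	# mirroring the commit above: docker/.ubuntu18 on Ubuntu, podman/.rhel8.2 on Red Hat.
	match = re.search('Ubuntu|Red Hat', hostnamectl_output)
	if match is None:
		raise RuntimeError('Unsupported build host (expected Ubuntu or Red Hat)')
	if match.group(0) == 'Ubuntu':
		return 'docker', '.ubuntu18'
	return 'podman', '.rhel8.2'

if __name__ == '__main__':
	# Local example only; the CI framework issues hostnamectl over SSH instead.
	output = subprocess.run(['hostnamectl'], capture_output=True, text=True).stdout
	cli, suffix = detect_container_cli(output)
	print('container cli: ' + cli + ', dockerfile suffix: ' + suffix)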
@@ -218,7 +218,7 @@ class HTMLManagement():
 		self.htmlFile.write(' <div id="build-tab" class="tab-pane fade">\n')
 		self.htmlFile.write(' <table class="table" border = "1">\n')
 		self.htmlFile.write(' <tr bgcolor = "#33CCFF" >\n')
-		self.htmlFile.write(' <th>Relative Time (ms)</th>\n')
+		self.htmlFile.write(' <th>Relative Time (s)</th>\n')
 		self.htmlFile.write(' <th>Test Id</th>\n')
 		self.htmlFile.write(' <th>Test Desc</th>\n')
 		self.htmlFile.write(' <th>Test Options</th>\n')
@@ -399,6 +399,53 @@ class HTMLManagement():
 			self.htmlFile.write(' </tr>\n')
 		self.htmlFile.close()
+	def CreateHtmlNextTabHeaderTestRow(self, collectInfo, allImagesSize, machine='eNB'):
+		if (self.htmlFooterCreated or (not self.htmlHeaderCreated)):
+			return
+		self.htmlFile = open('test_results.html', 'a')
+		if bool(collectInfo) == False:
+			self.htmlFile.write(' <tr bgcolor = "red" >\n')
+			self.htmlFile.write(' <td colspan=' + str(5+self.htmlUEConnected) + '><b> ----IMAGES BUILDING FAILED - Unable to recover the image logs ---- </b></td>\n')
+			self.htmlFile.write(' </tr>\n')
+		else:
+			for image in collectInfo:
+				files = collectInfo[image]
+				# TabHeader for image logs on built shared and target images
+				self.htmlFile.write(' <tr bgcolor = "#F0F0F0" >\n')
+				self.htmlFile.write(' <td colspan=' + str(5+self.htmlUEConnected) + '><b> ---- ' + image + ' IMAGE STATUS ----> Size ' + allImagesSize[image] + ' </b></td>\n')
+				self.htmlFile.write(' </tr>\n')
+				self.htmlFile.write(' <tr bgcolor = "#33CCFF" >\n')
+				self.htmlFile.write(' <th colspan="2">Element</th>\n')
+				self.htmlFile.write(' <th>Nb Errors</th>\n')
+				self.htmlFile.write(' <th>Nb Warnings</th>\n')
+				self.htmlFile.write(' <th colspan=' + str(1+self.htmlUEConnected) + '>Status</th>\n')
+				self.htmlFile.write(' </tr>\n')
+				for fil in files:
+					parameters = files[fil]
+					# TestRow for image logs on built shared and target images
+					self.htmlFile.write(' <tr>\n')
+					self.htmlFile.write(' <td colspan="2" bgcolor = "lightcyan" >' + fil + ' </td>\n')
+					if (parameters['errors'] == 0):
+						self.htmlFile.write(' <td bgcolor = "green" >' + str(parameters['errors']) + '</td>\n')
+					else:
+						self.htmlFile.write(' <td bgcolor = "red" >' + str(parameters['errors']) + '</td>\n')
+					if (parameters['warnings'] == 0):
+						self.htmlFile.write(' <td bgcolor = "green" >' + str(parameters['warnings']) + '</td>\n')
+					elif ((parameters['warnings'] > 0) and (parameters['warnings'] <= 20)):
+						self.htmlFile.write(' <td bgcolor = "orange" >' + str(parameters['warnings']) + '</td>\n')
+					else:
+						self.htmlFile.write(' <td bgcolor = "red" >' + str(parameters['warnings']) + '</td>\n')
+					if (parameters['errors'] == 0) and (parameters['warnings'] == 0):
+						self.htmlFile.write(' <th colspan=' + str(1+self.htmlUEConnected) + ' bgcolor = "green" ><font color="white">OK </font></th>\n')
+					elif (parameters['errors'] == 0) and ((parameters['warnings'] > 0) and (parameters['warnings'] <= 20)):
+						self.htmlFile.write(' <th colspan=' + str(1+self.htmlUEConnected) + ' bgcolor = "orange" ><font color="white">OK </font></th>\n')
+					else:
+						self.htmlFile.write(' <th colspan=' + str(1+self.htmlUEConnected) + ' bgcolor = "red" > NOT OK </th>\n')
+					self.htmlFile.write(' </tr>\n')
+		self.htmlFile.close()
 	def CreateHtmlTestRowQueue(self, options, status, ue_status, ue_queue):
 		if ((not self.htmlFooterCreated) and (self.htmlHeaderCreated)):
 			self.htmlFile = open('test_results.html', 'a')
...
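As an illustration of the colour thresholds CreateHtmlNextTabHeaderTestRow applies to each build-log row (zero errors and warnings give a green status, one to twenty warnings give orange, any error or more than twenty warnings gives red), here is a small hypothetical helper that reproduces the same decision; it is not part of the framework.

def build_status_colour(errors, warnings):
	# Same thresholds as the status cell written by CreateHtmlNextTabHeaderTestRow.
	if errors > 0:
		return 'red'
	if warnings == 0:
		return 'green'
	if warnings <= 20:
		return 'orange'
	return 'red'

assert build_status_colour(0, 0) == 'green'
assert build_status_colour(0, 5) == 'orange'
assert build_status_colour(0, 25) == 'red'
assert build_status_colour(2, 0) == 'red'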
@@ -52,13 +52,11 @@ class SSHConnection():
 		self.picocom_closure = True
 	def open(self, ipaddress, username, password):
-		extraSshOptions = ''
 		count = 0
 		connect_status = False
-		if ipaddress == '192.168.18.197':
-			extraSshOptions = ' -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
 		while count < 4:
-			self.ssh = pexpect.spawn('ssh', [username + '@' + ipaddress + extraSshOptions], timeout = 5)
+			self.ssh = pexpect.spawn('ssh -o PubkeyAuthentication=no {}@{}'.format(username,ipaddress))
+			self.ssh.timeout = 5
 			self.sshresponse = self.ssh.expect(['Are you sure you want to continue connecting (yes/no)?', 'password:', 'Last login', pexpect.EOF, pexpect.TIMEOUT])
 			if self.sshresponse == 0:
 				self.ssh.sendline('yes')
...
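For context, a condensed sketch of the simplified connection logic above: the per-host SSH options are dropped, public-key authentication is disabled so the password prompt is always exercised, and the timeout is set on the spawn object. The retry loop and error handling of the real SSHConnection.open() are omitted; this is a minimal illustration under those assumptions, not the framework's implementation.

import pexpect

def open_ssh(ipaddress, username, password):
	# Force password authentication, as in the updated SSHConnection.open()
	ssh = pexpect.spawn('ssh -o PubkeyAuthentication=no {}@{}'.format(username, ipaddress))
	ssh.timeout = 5
	idx = ssh.expect(['Are you sure you want to continue connecting (yes/no)?', 'password:', 'Last login', pexpect.EOF, pexpect.TIMEOUT])
	if idx == 0:
		# First connection to this host: accept the host key, then wait for the password prompt.
		ssh.sendline('yes')
		idx = ssh.expect(['password:', pexpect.EOF, pexpect.TIMEOUT])
		if idx != 0:
			raise ConnectionError('no password prompt from ' + ipaddress)
		ssh.sendline(password)
	elif idx == 1:
		ssh.sendline(password)
	elif idx == 2:
		pass  # login already completed without a password prompt
	else:
		raise ConnectionError('could not reach ' + ipaddress)
	return ssh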