Commit fe330f7b authored by Raphael Defosseux

Merge branch 'ci-new-docker-pipeline' into 'develop'

Ci new docker pipeline

See merge request oai/openairinterface5g!1095
parents 3dc0a098 6f262290
#!/bin/groovy
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the OAI Public License, Version 1.1 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.openairinterface.org/?page_id=698
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* contact@openairinterface.org
*/
// Location of the executor node
def nodeExecutor = params.nodeExecutor
// Tags to shorten pipeline duration
def doMandatoryTests = false
def doFullTestsuite = false
pipeline {
agent {
label nodeExecutor
}
options {
disableConcurrentBuilds()
timestamps()
gitLabConnection('OAI GitLab')
ansiColor('xterm')
}
stages {
stage ("Verify Parameters") {
steps {
script {
JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
def allParametersPresent = true
echo '\u2705 \u001B[32mVerify Labels\u001B[0m'
if ("MERGE".equals(env.gitlabActionType)) {
LABEL_CHECK = sh returnStdout: true, script: 'ci-scripts/checkGitLabMergeRequestLabels.sh --mr-id ' + env.gitlabMergeRequestIid
LABEL_CHECK = LABEL_CHECK.trim()
if (LABEL_CHECK == 'NONE') {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Your merge request has none of the mandatory labels:\n\n"
message += " - BUILD-ONLY\n"
message += " - 4G-LTE\n"
message += " - 5G-NR\n"
message += " - CI\n\n"
message += "Not performing CI due to lack of labels"
addGitLabMRComment comment: message
error('Not performing CI due to lack of labels')
} else if (LABEL_CHECK == 'FULL') {
doMandatoryTests = true
doFullTestsuite = true
} else if (LABEL_CHECK == 'SHORTEN-5G') {
doMandatoryTests = true
} else {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): We will perform only build stages on your Merge Request"
addGitLabMRComment comment: message
}
} else {
doMandatoryTests = true
doFullTestsuite = true
}
}
}
}
stage ("Verify Guidelines") {
steps {
echo "Git URL is ${GIT_URL}"
echo "GitLab Act is ${env.gitlabActionType}"
script {
if ("MERGE".equals(env.gitlabActionType)) {
// For some time now, gitlabUserEmail has not been populated in push events
gitCommitAuthorEmailAddr = env.gitlabUserEmail
echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
// The GitLab-Jenkins plugin integration cannot perform the merge by itself
// Doing it manually --> it may run into merge conflicts
sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
} else {
echo "Git Branch is ${GIT_BRANCH}"
echo "Git Commit is ${GIT_COMMIT}"
// For some time now, gitlabUserEmail has not been populated in push events
gitCommitAuthorEmailAddr = sh returnStdout: true, script: 'git log -n1 --pretty=format:%ae ${GIT_COMMIT}'
gitCommitAuthorEmailAddr = gitCommitAuthorEmailAddr.trim()
echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
sh "git log -n1 --pretty=format:\"%s\" > .git/CI_COMMIT_MSG"
}
}
}
post {
failure {
script {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Merge Conflicts -- Cannot perform CI"
addGitLabMRComment comment: message
currentBuild.result = 'FAILURE'
}
}
}
}
// Build Stages are Mandatory
// Later we will add an Ubuntu20 build
stage ("Image Building Processes") {
parallel {
stage ("Ubuntu18 Build") {
steps {
script {
triggerSlaveJob ('RAN-Ubuntu18-Image-Builder', 'Ubuntu18-Images-Build')
}
}
post {
always {
script {
finalizeSlaveJob('RAN-Ubuntu18-Image-Builder')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
}
}
}
}
stage ("RHEL8 Build") {
steps {
script {
triggerSlaveJob ('RAN-RHEL8-Image-Builder', 'RHEL8-Images-Build')
}
}
post {
always {
script {
finalizeSlaveJob('RAN-RHEL8-Image-Builder')
}
}
failure {
script {
currentBuild.result = 'FAILURE'
}
}
}
}
}
}
}
post {
always {
script {
emailext attachmentsPattern: '*results*.html',
body: '''Hi,
Here are attached HTML report files for $PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!
Regards,
OAI CI Team''',
replyTo: 'no-reply@openairinterface.org',
subject: '$PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!',
to: gitCommitAuthorEmailAddr
if (fileExists('.git/CI_COMMIT_MSG')) {
sh "rm -f .git/CI_COMMIT_MSG"
}
}
}
success {
script {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")"
if ("MERGE".equals(env.gitlabActionType)) {
echo "This is a MERGE event"
addGitLabMRComment comment: message
}
}
}
failure {
script {
def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")"
if ("MERGE".equals(env.gitlabActionType)) {
echo "This is a MERGE event"
addGitLabMRComment comment: message
}
}
}
}
}
// ---- Slave Job functions
def triggerSlaveJob (jobName, gitlabStatusName) {
// Workaround for the "cancelled" GitLab pipeline notification
// The slave job is triggered with propagate set to false so that the commands below still run
// From the stage pipeline point of view, its status is then always PASS/SUCCESS
// The localStatus variable MUST be analyzed to properly assess the real status
localStatus = build job: jobName,
parameters: [
string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
], propagate: false
localResult = localStatus.getResult()
echo "${jobName} Slave Job status is ${localResult}"
gitlabCommitStatus(name: gitlabStatusName) {
if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
echo "${jobName} Slave Job is OK"
} else {
echo "${jobName} Slave Job is KO"
sh "ci-scripts/fail.sh"
}
}
}
def triggerSlaveJobNoGitLab (jobName) {
// Workaround for the "cancelled" GitLab pipeline notification
// The slave job is triggered with propagate set to false so that the commands below still run
// From the stage pipeline point of view, its status is then always PASS/SUCCESS
// The localStatus variable MUST be analyzed to properly assess the real status
localStatus = build job: jobName,
parameters: [
string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
], propagate: false
localResult = localStatus.getResult()
echo "${jobName} Slave Job status is ${localResult}"
if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
echo "${jobName} Slave Job is OK"
} else {
echo "${jobName} Slave Job is KO"
sh "ci-scripts/fail.sh"
}
}
def finalizeSlaveJob(jobName) {
// In case of any non-success, we retrieve the HTML report of the last completed
// slave job. The only drawback is that we may retrieve the HTML report of a previous build
fileName = "test_results-${jobName}.html"
if (!fileExists(fileName)) {
copyArtifacts(projectName: jobName,
filter: 'test_results*.html',
selector: lastCompleted())
if (fileExists(fileName)) {
sh "sed -i -e 's#TEMPLATE_BUILD_TIME#${JOB_TIMESTAMP}#' ${fileName}"
archiveArtifacts artifacts: fileName
}
}
}
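For reference, here is a rough Python sketch (not part of the commit; the function name is hypothetical) of the decision table that the "Verify Parameters" stage applies to the keyword printed by ci-scripts/checkGitLabMergeRequestLabels.sh. It only mirrors the Groovy logic above.

def test_scope_from_label_check(label_check):
    # 'NONE'        -> abort: the MR carries none of the mandatory labels
    # 'FULL'        -> mandatory tests + full test suite
    # 'SHORTEN-5G'  -> mandatory tests only
    # anything else -> build-only run
    if label_check == 'NONE':
        raise ValueError('Not performing CI due to lack of labels')
    return {
        'doMandatoryTests': label_check in ('FULL', 'SHORTEN-5G'),
        'doFullTestsuite': label_check == 'FULL',
    }

# e.g. test_scope_from_label_check('SHORTEN-5G')
# -> {'doMandatoryTests': True, 'doFullTestsuite': False}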
@@ -241,6 +241,10 @@ pipeline {
stage ("Terminate") {
parallel {
stage('Terminate UE') {
// Bypassing this stage if no ADB server is defined
when {
expression { params.ADB_IPAddress != "none" }
}
steps {
echo '\u2705 \u001B[32mTerminate UE\u001B[0m'
withCredentials([
@@ -275,6 +279,10 @@ pipeline {
}
}
stage('Terminate SPGW') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
echo '\u2705 \u001B[32mTerminate SPGW\u001B[0m'
withCredentials([
@@ -292,6 +300,10 @@ pipeline {
}
}
stage('Terminate MME') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
echo '\u2705 \u001B[32mTerminate MME\u001B[0m'
withCredentials([
@@ -309,6 +321,10 @@ pipeline {
}
}
stage('Terminate HSS') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
echo '\u2705 \u001B[32mTerminate HSS\u001B[0m'
withCredentials([
@@ -371,6 +387,10 @@ pipeline {
}
}
stage('Log Collection (SPGW)') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -389,6 +409,10 @@ pipeline {
}
}
stage('Log Collection (MME)') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -407,6 +431,10 @@ pipeline {
}
}
stage('Log Collection (HSS)') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -425,6 +453,10 @@ pipeline {
}
}
stage('Log Collection (Ping)') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -443,6 +475,10 @@ pipeline {
}
}
stage('Log Collection (Iperf)') {
// Bypassing this stage if EPC server is not defined
when {
expression { params.EPC_IPAddress != "none" }
}
steps {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -482,7 +518,7 @@ pipeline {
// Making sure that we really shut down everything before leaving
failure {
script {
if ((!termStatusArray[termUE]) && (params.ADB_IPAddress != "none")) {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.ADB_Credentials}", usernameVariable: 'ADB_Username', passwordVariable: 'ADB_Password']
]) {
@@ -496,21 +532,21 @@ pipeline {
sh "python3 ci-scripts/main.py --mode=TerminateeNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
}
}
if ((!termStatusArray[termSPGW]) && (params.EPC_IPAddress != "none")) {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
sh "python3 ci-scripts/main.py --mode=TerminateSPGW --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
}
}
if ((!termStatusArray[termMME]) && (params.EPC_IPAddress != "none")) {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
sh "python3 ci-scripts/main.py --mode=TerminateMME --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
}
}
if ((!termStatusArray[termHSS]) && (params.EPC_IPAddress != "none")) {
withCredentials([
[$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
]) {
...
#/*
# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# * contributor license agreements. See the NOTICE file distributed with
# * this work for additional information regarding copyright ownership.
# * The OpenAirInterface Software Alliance licenses this file to You under
# * the OAI Public License, Version 1.1 (the "License"); you may not use this file
# * except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.openairinterface.org/?page_id=698
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *-------------------------------------------------------------------------------
# * For more information about the OpenAirInterface (OAI) Software Alliance:
# * contact@openairinterface.org
# */
#---------------------------------------------------------------------
# Python for CI of OAI-eNB + COTS-UE
#
# Required Python Version
# Python 3.x
#
# Required Python Package
# pexpect
#---------------------------------------------------------------------
#-----------------------------------------------------------
# Import Libs
#-----------------------------------------------------------
import sys        # command-line arguments
import re         # regular expressions
import os
import subprocess
class SplitReport():
    def __init__(self):
        self.logfilename = ''
        self.destinationFolder = ''
    def split(self):
        # Sections delimited by "====== Start/End of log for X ======" markers are
        # extracted into <destinationFolder>/X; everything outside the markers stays
        # in the main log file.
        self.destinationFolder = self.logfilename.replace(".log", "")
        if os.path.isfile(self.logfilename):
            os.makedirs(self.destinationFolder, exist_ok=True)  # make sure the output folder exists
            newImageLog = open(self.logfilename + '.new', 'w')
            copyFlag = True
            with open(self.logfilename, 'r') as imageLog:
                for line in imageLog:
                    header = False
                    ret = re.search(r'====== Start of log for ([0-9\.A-Za-z\-\_]+) ======', line)
                    if ret is not None:
                        copyFlag = False
                        header = True
                        detailedLogFile = open(self.destinationFolder + '/' + ret.group(1), 'w')
                    if copyFlag:
                        newImageLog.write(line)
                    ret = re.search(r'====== End of log for ([0-9\.A-Za-z\-\_]+) ======', line)
                    if ret is not None:
                        copyFlag = True
                        detailedLogFile.close()
                    elif not copyFlag and not header:
                        detailedLogFile.write(line)
                imageLog.close()
            newImageLog.close()
            os.rename(self.logfilename + '.new', self.logfilename)
        else:
            print('Cannot split: log file not found')
#--------------------------------------------------------------------------------------------------------
#
# Start of main
#
#--------------------------------------------------------------------------------------------------------
argvs = sys.argv
argc = len(argvs)

SP = SplitReport()

while len(argvs) > 1:
    myArgv = argvs.pop(1)
    matchReg = re.match(r'^\-\-logfilename=(.+)$', myArgv, re.IGNORECASE)
    if matchReg:
        SP.logfilename = matchReg.group(1)

SP.split()
sys.exit(0)
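As a rough usage sketch (not part of the commit; the file and section names below are hypothetical), the splitter can be exercised on a synthetic log whose markers match the "====== Start/End of log for ... ======" lines echoed by the CI build scripts, assuming the SplitReport class above is available:

import os, tempfile

workDir = tempfile.mkdtemp()
logName = os.path.join(workDir, 'all_images.log')            # hypothetical aggregated log
os.makedirs(logName.replace('.log', ''), exist_ok=True)      # split() writes the per-section files here
with open(logName, 'w') as f:
    f.write('global build summary line\n')
    f.write('====== Start of log for lte-softmodem.Rel15.txt ======\n')
    f.write('detailed compilation output\n')
    f.write('====== End of log for lte-softmodem.Rel15.txt ======\n')

SP = SplitReport()
SP.logfilename = logName
SP.split()
# Afterwards all_images.log keeps only the global line, while
# all_images/lte-softmodem.Rel15.txt holds the detailed section.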
@@ -22,7 +22,7 @@
-->
<testCaseList>
<htmlTabRef>build-tab</htmlTabRef>
<htmlTabName>Build Container Images</htmlTabName>
<htmlTabIcon>wrench</htmlTabIcon>
<TestCaseRequestedList>
000001
@@ -31,7 +31,7 @@
<testCase id="000001">
<class>Build_Image</class>
<desc>Build all Images</desc>
<kind>all</kind>
<eNB_instance>0</eNB_instance>
<eNB_serverId>0</eNB_serverId>
...
@@ -50,6 +50,7 @@ REL="Rel15"
HW="None"
TP="None"
EPC=0
VERBOSE_CI=0
VERBOSE_COMPILE=0
CFLAGS_PROCESSOR_USER=""
RUN_GROUP=0
@@ -324,6 +325,10 @@ function main() {
HWLAT_TEST=1
echo_info "Will compile hw latency test program"
shift;;
--verbose-ci)
VERBOSE_CI=1
echo_info "Will compile with verbose instructions in CI Docker env"
shift;;
--verbose-compile)
VERBOSE_COMPILE=1
echo_info "Will compile with verbose instructions"
...
@@ -41,7 +41,7 @@ KERNEL_VERSION=$(uname -r | cut -d '.' -f1)
KERNEL_MAJOR=$(uname -r | cut -d '.' -f2)
#check if we run inside a container
IS_CONTAINER=`egrep -c "docker|podman|kubepods|libpod|buildah" /proc/self/cgroup || true`
#sudo is not needed when we are root
if [ "$UID" = 0 ]
then
@@ -221,6 +221,11 @@ compilations() {
ret=$?
} > $dlog/$2.$REL.txt 2>&1
set -e
if [ "$VERBOSE_CI" == "1" ]; then
echo_info "====== Start of log for $2.$REL.txt ======"
cat $dlog/$2.$REL.txt
echo_info "====== End of log for $2.$REL.txt ======"
fi
if [[ $ret -ne 0 ]]; then
check_warnings "$dlog/$2.$REL.txt"
check_errors "$dlog/$2.$REL.txt"
@@ -359,10 +364,10 @@ check_install_usrp_uhd_driver(){
elif [[ "$OS_BASEDISTRO" == "fedora" ]]; then
if [ $IS_CONTAINER -eq 0 ]
then
$SUDO $INSTALLER -y install python boost libusb-devel libusbx-devel boost-devel python-mako python-docutils $CMAKE
$SUDO -H pip install requests
else
$SUDO $INSTALLER -y install boost boost-devel $CMAKE
$SUDO pip3 install mako requests
fi
if [[ "$OS_DISTRO" == "rhel" ]] || [[ "$OS_DISTRO" == "centos" ]]; then
@@ -488,7 +493,7 @@ install_soapy_from_source(){
#git checkout tags/release_003_010_001_001
mkdir -p build
cd build
$CMAKE ../
echo "Compiling SoapyRemote"
make -j`nproc`
$SUDO make install
@@ -507,7 +512,7 @@ install_soapy_iris_from_source(){
cd sklk-soapyiris
mkdir -p build
cd build
$CMAKE ../
echo "Compiling SoapyIris"
make -j`nproc`
$SUDO make install
@@ -684,7 +689,7 @@ check_install_oai_software() {
automake \
bison \
build-essential \
$CMAKE \
cmake-curses-gui \
ninja-build \
doxygen \
...
@@ -27,14 +27,15 @@
FROM localhost/ran-build:latest AS enb-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
RUN /bin/sh oaienv && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
./build_oai --eNB --ninja -w USRP --verbose-ci
# debug
#RUN ldconfig -v && ldd /oai-ran/targets/bin/lte-softmodem.Rel15
...
@@ -27,14 +27,15 @@
FROM ran-build:latest AS enb-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
RUN /bin/sh oaienv && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
./build_oai --eNB --ninja -w USRP --verbose-ci
RUN apt-get install -y python3-pip && \
pip3 install --ignore-installed pyyaml && \
...
@@ -27,14 +27,15 @@
FROM localhost/ran-build:latest AS gnb-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
RUN /bin/sh oaienv && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
./build_oai --gNB --ninja -w USRP --verbose-ci
#debug
#RUN ldconfig -v
...
@@ -27,14 +27,15 @@
FROM ran-build:latest AS gnb-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
RUN /bin/sh oaienv && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
./build_oai --gNB --ninja -w USRP --verbose-ci
#debug
RUN ldconfig -v
...
@@ -27,15 +27,15 @@
FROM localhost/ran-build:latest AS lte-ue-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
RUN /bin/sh oaienv && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
./build_oai --UE --ninja -w USRP --verbose-ci
# debug
#RUN ldconfig -v && ldd /oai-ran/targets/bin/lte-uesoftmodem.Rel15
...
@@ -27,14 +27,15 @@
FROM ran-build:latest AS lte-ue-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
RUN /bin/sh oaienv && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
./build_oai --UE --ninja -w USRP --verbose-ci
# debug
#RUN ldconfig -v
...
@@ -27,14 +27,15 @@
FROM localhost/ran-build:latest AS nr-ue-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
RUN /bin/sh oaienv && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
./build_oai --nrUE --ninja -w USRP --verbose-ci
# debug
#RUN ldconfig -v
...
@@ -27,14 +27,15 @@
FROM ran-build:latest AS nr-ue-build
RUN rm -Rf /oai-ran
WORKDIR /oai-ran
COPY . .
#run build_oai to build the target image
RUN /bin/sh oaienv && \
cd cmake_targets && \
rm -Rf log && \
mkdir -p log && \
./build_oai --nrUE --ninja -w USRP --verbose-ci
# debug
#RUN ldconfig -v
...