diff --git a/ci-scripts/Jenkinsfile-GitLab-Container b/ci-scripts/Jenkinsfile-GitLab-Container
new file mode 100644
index 0000000000000000000000000000000000000000..fbf9ba8f4b551070ff35bdc6ffbce1f9b0d17d69
--- /dev/null
+++ b/ci-scripts/Jenkinsfile-GitLab-Container
@@ -0,0 +1,261 @@
+#!/bin/groovy
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+
+// Location of the executor node
+def nodeExecutor = params.nodeExecutor
+
+// Tags to shorten pipeline duration
+def doMandatoryTests = false
+def doFullTestsuite = false
+
+pipeline {
+  agent {
+    label nodeExecutor
+  }
+  options {
+    disableConcurrentBuilds()
+    timestamps()
+    gitLabConnection('OAI GitLab')
+    ansiColor('xterm')
+  }
+
+  stages {
+    stage ("Verify Parameters") {
+      steps {
+        script {
+          JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"'
+          JOB_TIMESTAMP = JOB_TIMESTAMP.trim()
+
+          echo '\u2705 \u001B[32mVerify Parameters\u001B[0m'
+          def allParametersPresent = true
+
+          echo '\u2705 \u001B[32mVerify Labels\u001B[0m'
+          if ("MERGE".equals(env.gitlabActionType)) {
+            LABEL_CHECK = sh returnStdout: true, script: 'ci-scripts/checkGitLabMergeRequestLabels.sh --mr-id ' + env.gitlabMergeRequestIid
+            LABEL_CHECK = LABEL_CHECK.trim()
+            if (LABEL_CHECK == 'NONE') {
+              def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Your merge request has none of the mandatory labels:\n\n"
+              message += " - BUILD-ONLY\n"
+              message += " - 4G-LTE\n"
+              message += " - 5G-NR\n"
+              message += " - CI\n\n"
+              message += "Not performing CI due to lack of labels"
+              addGitLabMRComment comment: message
+              error('Not performing CI due to lack of labels')
+            } else if (LABEL_CHECK == 'FULL') {
+              doMandatoryTests = true
+              doFullTestsuite = true
+            } else if (LABEL_CHECK == 'SHORTEN-5G') {
+              doMandatoryTests = true
+            } else {
+              def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): We will perform only build stages on your Merge Request"
+              addGitLabMRComment comment: message
+            }
+          } else {
+            doMandatoryTests = true
+            doFullTestsuite = true
+          }
+        }
+      }
+    }
+    stage ("Verify Guidelines") {
+      steps {
+        echo "Git URL     is ${GIT_URL}"
+        echo "GitLab Act    is ${env.gitlabActionType}"
+        script {
+          if ("MERGE".equals(env.gitlabActionType)) {
+            // Note: for push events the GitLab plugin no longer populates gitlabUserEmail
+            gitCommitAuthorEmailAddr = env.gitlabUserEmail
+            echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
+            // GitLab-Jenkins plugin integration is lacking to perform the merge by itself
+            // Doing it manually --> it may have merge conflicts
+            sh "./ci-scripts/doGitLabMerge.sh --src-branch ${env.gitlabSourceBranch} --src-commit ${env.gitlabMergeRequestLastCommit} --target-branch ${env.gitlabTargetBranch} --target-commit ${GIT_COMMIT}"
+          } else {
+            echo "Git Branch    is ${GIT_BRANCH}"
+            echo "Git Commit    is ${GIT_COMMIT}"
+            // Note: for push events the GitLab plugin no longer populates gitlabUserEmail
+            gitCommitAuthorEmailAddr = sh returnStdout: true, script: 'git log -n1 --pretty=format:%ae ${GIT_COMMIT}'
+            gitCommitAuthorEmailAddr = gitCommitAuthorEmailAddr.trim()
+            echo "GitLab Usermail is ${gitCommitAuthorEmailAddr}"
+            sh "git log -n1 --pretty=format:\"%s\" > .git/CI_COMMIT_MSG"
+          }
+        }
+      }
+      post {
+        failure {
+          script {
+            def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Merge Conflicts -- Cannot perform CI"
+            addGitLabMRComment comment: message
+            currentBuild.result = 'FAILURE'
+          }
+        }
+      }
+    }
+    // Build Stages are Mandatory
+    // Later we will add an Ubuntu20 build
+    stage ("Image Building Processes") {
+      parallel {
+        stage ("Ubuntu18 Build") {
+          steps {
+            script {
+              triggerSlaveJob ('RAN-Ubuntu18-Image-Builder', 'Ubuntu18-Images-Build')
+            }
+          }
+          post {
+            always {
+              script {
+                finalizeSlaveJob('RAN-Ubuntu18-Image-Builder')
+              }
+            }
+            failure {
+              script {
+                currentBuild.result = 'FAILURE'
+              }
+            }
+          }
+        }
+        stage ("RHEL8 Build") {
+          steps {
+            script {
+              triggerSlaveJob ('RAN-RHEL8-Image-Builder', 'RHEL8-Images-Build')
+            }
+          }
+          post {
+            always {
+              script {
+                finalizeSlaveJob('RAN-RHEL8-Image-Builder')
+              }
+            }
+            failure {
+              script {
+                currentBuild.result = 'FAILURE'
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+  post {
+    always {
+      script {
+        emailext attachmentsPattern: '*results*.html',
+           body: '''Hi,
+
+Here are attached HTML report files for $PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!
+
+Regards,
+OAI CI Team''',
+           replyTo: 'no-reply@openairinterface.org',
+           subject: '$PROJECT_NAME - Build # $BUILD_NUMBER - $BUILD_STATUS!',
+           to: gitCommitAuthorEmailAddr
+
+        if (fileExists('.git/CI_COMMIT_MSG')) {
+          sh "rm -f .git/CI_COMMIT_MSG"
+        }
+      }
+    }
+    success {
+      script {
+        def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")"
+        if ("MERGE".equals(env.gitlabActionType)) {
+          echo "This is a MERGE event"
+          addGitLabMRComment comment: message
+        }
+      }
+    }
+    failure {
+      script {
+        def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")"
+        if ("MERGE".equals(env.gitlabActionType)) {
+          echo "This is a MERGE event"
+          addGitLabMRComment comment: message
+        }
+      }
+    }
+  }
+}
+
+// ----  Slave Job functions
+
+def triggerSlaveJob (jobName, gitlabStatusName) {
+  // Workaround for the "cancelled" GitLab pipeline notification
+  // The slave job is triggered with the propagate false so the following commands are executed
+  // Its status is now PASS/SUCCESS from a stage pipeline point of view
+  // localStatus variable MUST be analyzed to properly assess the status
+  localStatus = build job: jobName,
+    parameters: [
+      string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
+      string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
+      string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
+      booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
+      string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
+    ], propagate: false
+  localResult = localStatus.getResult()
+  echo "${jobName} Slave Job status is ${localResult}"
+  gitlabCommitStatus(name: gitlabStatusName) {
+    if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
+       echo "${jobName} Slave Job is OK"
+    } else {
+       echo "${jobName} Slave Job is KO"
+       sh "ci-scripts/fail.sh"
+    }
+  }
+}
+
+def triggerSlaveJobNoGitLab (jobName) {
+  // Workaround for the "cancelled" GitLab pipeline notification
+  // The slave job is triggered with the propagate false so the following commands are executed
+  // Its status is now PASS/SUCCESS from a stage pipeline point of view
+  // localStatus variable MUST be analyzed to properly assess the status
+  localStatus = build job: jobName,
+    parameters: [
+      string(name: 'eNB_Repository', value: String.valueOf(GIT_URL)),
+      string(name: 'eNB_Branch', value: String.valueOf(env.gitlabSourceBranch)),
+      string(name: 'eNB_CommitID', value: String.valueOf(env.gitlabMergeRequestLastCommit)),
+      booleanParam(name: 'eNB_mergeRequest', value: "MERGE".equals(env.gitlabActionType)),
+      string(name: 'eNB_TargetBranch', value: String.valueOf(env.gitlabTargetBranch))
+    ], propagate: false
+  localResult = localStatus.getResult()
+  echo "${jobName} Slave Job status is ${localResult}"
+  if (localStatus.resultIsBetterOrEqualTo('SUCCESS')) {
+     echo "${jobName} Slave Job is OK"
+  } else {
+     echo "${jobName} Slave Job is KO"
+     sh "ci-scripts/fail.sh"
+  }
+}
+
+def finalizeSlaveJob(jobName) {
+  // In case of any non-success, we are retrieving the HTML report of the last completed
+  // slave job. The only drawback is that we may retrieve the HTML report of a previous build
+  fileName = "test_results-${jobName}.html"
+  if (!fileExists(fileName)) {
+    copyArtifacts(projectName: jobName,
+      filter: 'test_results*.html',
+      selector: lastCompleted())
+    if (fileExists(fileName)) {
+      sh "sed -i -e 's#TEMPLATE_BUILD_TIME#${JOB_TIMESTAMP}#' ${fileName}"
+      archiveArtifacts artifacts: fileName
+    }
+  }
+}
diff --git a/ci-scripts/Jenkinsfile-tmp-ran b/ci-scripts/Jenkinsfile-tmp-ran
index ab1573de7fc374d040e22e353e1641cca4b70edf..7d5307620f0ad4b8034cc174a86b5fff2544db89 100644
--- a/ci-scripts/Jenkinsfile-tmp-ran
+++ b/ci-scripts/Jenkinsfile-tmp-ran
@@ -241,6 +241,10 @@ pipeline {
         stage ("Terminate") {
             parallel {
                 stage('Terminate UE') {
+                    // Bypassing this stage if no ADB server is defined
+                    when {
+                      expression { params.ADB_IPAddress != "none" }
+                    }
                     steps {
                         echo '\u2705 \u001B[32mTerminate UE\u001B[0m'
                         withCredentials([
@@ -275,6 +279,10 @@ pipeline {
                     }
                 }
                 stage('Terminate SPGW') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                      expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         echo '\u2705 \u001B[32mTerminate SPGW\u001B[0m'
                         withCredentials([
@@ -292,6 +300,10 @@ pipeline {
                     }
                 }
                 stage('Terminate MME') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                      expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         echo '\u2705 \u001B[32mTerminate MME\u001B[0m'
                         withCredentials([
@@ -309,6 +321,10 @@ pipeline {
                     }
                 }
                 stage('Terminate HSS') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                      expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         echo '\u2705 \u001B[32mTerminate HSS\u001B[0m'
                         withCredentials([
@@ -371,6 +387,10 @@ pipeline {
                     }
                 }
                 stage('Log Collection (SPGW)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                      expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials([
                              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -389,6 +409,10 @@ pipeline {
                     }
                 }
                 stage('Log Collection (MME)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                      expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials([
                              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -407,6 +431,10 @@ pipeline {
                     }
                 }
                 stage('Log Collection (HSS)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                      expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials([
                              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -425,6 +453,10 @@ pipeline {
                     }
                 }
                 stage('Log Collection (Ping)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                      expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials([
                              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -443,6 +475,10 @@ pipeline {
                     }
                 }
                 stage('Log Collection (Iperf)') {
+                    // Bypassing this stage if EPC server is not defined
+                    when {
+                      expression { params.EPC_IPAddress != "none" }
+                    }
                     steps {
                         withCredentials([
                              [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
@@ -482,7 +518,7 @@ pipeline {
         // Making sure that we really shutdown every thing before leaving
         failure {
             script {
-                if (!termStatusArray[termUE]) {
+                if ((!termStatusArray[termUE]) && (params.ADB_IPAddress != "none")) {
                     withCredentials([
                         [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.ADB_Credentials}", usernameVariable: 'ADB_Username', passwordVariable: 'ADB_Password']
                     ]) {
@@ -496,21 +532,21 @@ pipeline {
                         sh "python3 ci-scripts/main.py --mode=TerminateeNB --eNBIPAddress=${params.eNB_IPAddress} --eNBUserName=${eNB_Username} --eNBPassword=${eNB_Password}"
                     }
                 }
-                if (!termStatusArray[termSPGW]) {
+                if ((!termStatusArray[termSPGW]) && (params.EPC_IPAddress != "none")) {
                     withCredentials([
                         [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
                     ]) {
                         sh "python3 ci-scripts/main.py --mode=TerminateSPGW --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
                     }
                 }
-                if (!termStatusArray[termMME]) {
+                if ((!termStatusArray[termMME]) && (params.EPC_IPAddress != "none")) {
                     withCredentials([
                         [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
                     ]) {
                         sh "python3 ci-scripts/main.py --mode=TerminateMME --EPCIPAddress=${params.EPC_IPAddress} --EPCUserName=${EPC_Username} --EPCPassword=${EPC_Password} --EPCType=${params.EPC_Type} --EPCSourceCodePath=${params.EPC_SourceCodePath}"
                     }
                 }
-                if (!termStatusArray[termHSS]) {
+                if ((!termStatusArray[termHSS]) && (params.EPC_IPAddress != "none")) {
                     withCredentials([
                         [$class: 'UsernamePasswordMultiBinding', credentialsId: "${params.EPC_Credentials}", usernameVariable: 'EPC_Username', passwordVariable: 'EPC_Password']
                     ]) {
diff --git a/ci-scripts/cls_containerize.py b/ci-scripts/cls_containerize.py
index 0c07dfe91fe24f044f5b52915ea12ad57944aa90..0debe424684b8c9396cb85058df8bae4db280d50 100644
--- a/ci-scripts/cls_containerize.py
+++ b/ci-scripts/cls_containerize.py
@@ -35,6 +35,7 @@ import sys              # arg
 import re               # reg
 import logging
 import os
+import shutil
 import time
 from multiprocessing import Process, Lock, SimpleQueue
 from zipfile import ZipFile
@@ -124,7 +125,7 @@ class Containerize():
 			self.cli = 'docker'
 			self.dockerfileprefix = '.ubuntu18'
 		elif self.host == 'Red Hat':
-			self.cli = 'podman'
+			self.cli = 'sudo podman'
 			self.dockerfileprefix = '.rhel8.2'
 
 		imageNames = []
@@ -173,6 +174,7 @@ class Containerize():
 		# if the branch is not develop, then it is a merge request and we need to do 
 		# the potential merge. Note that merge conflicts should already been checked earlier
 		imageTag = 'develop'
+		sharedTag = 'develop'
 		if (self.ranAllowMerge):
 			imageTag = 'ci-temp'
 			if self.ranTargetBranch == '':
@@ -188,38 +190,23 @@ class Containerize():
 			mySSH.command('mkdir -p tmp/entitlement/', '\$', 5) 
 			mySSH.command('sudo cp /etc/rhsm/ca/redhat-uep.pem tmp/ca/', '\$', 5)
 			mySSH.command('sudo cp /etc/pki/entitlement/*.pem tmp/entitlement/', '\$', 5)
-			
-		#mySSH.close()
-		#return 0
+
 		sharedimage = 'ran-build'
 		# Let's remove any previous run artifacts if still there
-		mySSH.command(self.cli + ' image prune --force', '\$', 5)
-		mySSH.command(self.cli + ' image rm ' + sharedimage + ':' + imageTag, '\$', 5)
-		for image,pattern in imageNames:
-			mySSH.command(self.cli + ' image rm ' + image + ':' + imageTag, '\$', 5)
-		# Build the shared image
-		mySSH.command(self.cli + ' build --target ' + sharedimage + ' --tag ' + sharedimage + ':' + imageTag + ' --file docker/Dockerfile.ran' + self.dockerfileprefix + ' --build-arg NEEDED_GIT_PROXY="http://proxy.eurecom.fr:8080" . > cmake_targets/log/ran-build.log 2>&1', '\$', 1600)
-		# Build the target image(s)
-		previousImage = sharedimage + ':' + imageTag
-		danglingShaOnes=[]
+		mySSH.command(self.cli + ' image prune --force', '\$', 30)
+		if (not self.ranAllowMerge):
+			mySSH.command(self.cli + ' image rm ' + sharedimage + ':' + sharedTag, '\$', 30)
 		for image,pattern in imageNames:
-			# the archived Dockerfiles have "ran-build:latest" as base image
-			# we need to update them with proper tag
-			mySSH.command('sed -i -e "s#' + sharedimage + ':latest#' + sharedimage + ':' + imageTag + '#" docker/Dockerfile.' + pattern + self.dockerfileprefix, '\$', 5)
-			mySSH.command(self.cli + ' build --target ' + image + ' --tag ' + image + ':' + imageTag + ' --file docker/Dockerfile.' + pattern + self.dockerfileprefix + ' . > cmake_targets/log/' + image + '.log 2>&1', '\$', 1200)
-			# Retrieving the dangling image(s) for the log collection
-			mySSH.command(self.cli + ' images --filter "dangling=true" --filter "since=' + previousImage + '" -q | sed -e "s#^#sha=#"', '\$', 5)
-			result = re.search('sha=(?P<imageShaOne>[a-zA-Z0-9\-\_]+)', mySSH.getBefore())
-			if result is not None:
-				danglingShaOnes.append((image, result.group('imageShaOne')))
-			previousImage = image + ':' + imageTag
+			mySSH.command(self.cli + ' image rm ' + image + ':' + imageTag, '\$', 30)
 
-		imageTag = 'ci-temp'
-		# First verify if images were properly created.
+		# Build the shared image only on Push Events (not on Merge Requests)
+		if (not self.ranAllowMerge):
+			mySSH.command(self.cli + ' build --target ' + sharedimage + ' --tag ' + sharedimage + ':' + sharedTag + ' --file docker/Dockerfile.ran' + self.dockerfileprefix + ' --build-arg NEEDED_GIT_PROXY="http://proxy.eurecom.fr:8080" . > cmake_targets/log/ran-build.log 2>&1', '\$', 1600)
+		# First verify if the shared image was properly created.
 		status = True
-		mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + sharedimage + ':' + imageTag, '\$', 5)
-		if mySSH.getBefore().count('No such object') != 0:
-			logging.error('Could not build properly ran-build')
+		mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + sharedimage + ':' + sharedTag, '\$', 5)
+		if mySSH.getBefore().count('o such image') != 0:
+			logging.error('\u001B[1m Could not build properly ran-build\u001B[0m')
 			status = False
 		else:
 			result = re.search('Size *= *(?P<size>[0-9\-]+) *bytes', mySSH.getBefore())
@@ -240,11 +227,40 @@ class Containerize():
 						self.allImagesSize['ran-build'] = str(round(imageSize,1)) + ' Gbytes'
 			else:
 				logging.debug('ran-build size is unknown')
+		# If the shared image failed, no need to continue
+		if not status:
+			# Remove the most recent exited container left over from the failed build
+			mySSH.command(self.cli + ' ps --quiet --filter "status=exited" -n1 | xargs ' + self.cli + ' rm -f', '\$', 5)
+			mySSH.command(self.cli + ' image prune --force', '\$', 30)
+			mySSH.close()
+			logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
+			HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
+			HTML.CreateHtmlTabFooter(False)
+			sys.exit(1)
+		else:
+			# Recover build logs, for the moment only possible when build is successful
+			mySSH.command(self.cli + ' create --name test ' + sharedimage + ':' + sharedTag, '\$', 5)
+			mySSH.command('mkdir -p cmake_targets/log/ran-build', '\$', 5)
+			mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/ran-build', '\$', 5)
+			mySSH.command(self.cli + ' rm -f test', '\$', 5)
+
+		# Build the target image(s)
 		for image,pattern in imageNames:
+			# the archived Dockerfiles have "ran-build:latest" as base image
+			# we need to update them with proper tag
+			mySSH.command('sed -i -e "s#' + sharedimage + ':latest#' + sharedimage + ':' + sharedTag + '#" docker/Dockerfile.' + pattern + self.dockerfileprefix, '\$', 5)
+			mySSH.command(self.cli + ' build --target ' + image + ' --tag ' + image + ':' + imageTag + ' --file docker/Dockerfile.' + pattern + self.dockerfileprefix + ' . > cmake_targets/log/' + image + '.log 2>&1', '\$', 1200)
+			# split the log
+			mySSH.command('mkdir -p cmake_targets/log/' + image, '\$', 5)
+			mySSH.command('python3 ci-scripts/docker_log_split.py --logfilename=cmake_targets/log/' + image + '.log', '\$', 5)
+			# checking the status of the build
 			mySSH.command(self.cli + ' image inspect --format=\'Size = {{.Size}} bytes\' ' + image + ':' + imageTag, '\$', 5)
-			if mySSH.getBefore().count('No such object') != 0:
-				logging.error('Could not build properly ' + image)
+			if mySSH.getBefore().count('o such image') != 0:
+				logging.error('\u001B[1m Could not build properly ' + image + '\u001B[0m')
 				status = False
+				# Here we should check if the last container corresponds to a failed command and destroy it
+				mySSH.command(self.cli + ' ps --quiet --filter "status=exited" -n1 | xargs ' + self.cli + ' rm -f', '\$', 5)
+				self.allImagesSize[image] = 'N/A -- Build Failed'
 			else:
 				result = re.search('Size *= *(?P<size>[0-9\-]+) *bytes', mySSH.getBefore())
 				if result is not None:
@@ -264,39 +280,27 @@ class Containerize():
 							self.allImagesSize[image] = str(round(imageSize,1)) + ' Gbytes'
 				else:
 					logging.debug('ran-build size is unknown')
-		if not status:
-			mySSH.close()
-			logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
-			HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
-			#HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
-			HTML.CreateHtmlTabFooter(False)
-			sys.exit(1)
+					self.allImagesSize[image] = 'unknown'
+			# Now pruning dangling images in between target builds
+			mySSH.command(self.cli + ' image prune --force', '\$', 30)
 
-		# Recover build logs, for the moment only possible when build is successful
-		mySSH.command(self.cli + ' create --name test ' + sharedimage + ':' + imageTag, '\$', 5)
-		mySSH.command('mkdir -p cmake_targets/log/ran-build', '\$', 5)
-		mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/ran-build', '\$', 5)
-		mySSH.command(self.cli + ' rm -f test', '\$', 5)
-		for image,shaone in danglingShaOnes:
-			mySSH.command('mkdir -p cmake_targets/log/' + image, '\$', 5)
-			mySSH.command(self.cli + ' create --name test ' + shaone, '\$', 5)
-			mySSH.command(self.cli + ' cp test:/oai-ran/cmake_targets/log/. cmake_targets/log/' + image, '\$', 5)
-			mySSH.command(self.cli + ' rm -f test', '\$', 5)
-		mySSH.command(self.cli + ' image prune --force', '\$', 5)
-		mySSH.command('cd cmake_targets', '\$', 5)
+		# Analyzing the logs
+		mySSH.command('cd ' + lSourcePath + '/cmake_targets', '\$', 5)
 		mySSH.command('mkdir -p build_log_' + self.testCase_id, '\$', 5)
 		mySSH.command('mv log/* ' + 'build_log_' + self.testCase_id, '\$', 5)
-		#mySSH.close()
-	
-		mySSH.command('cd /tmp/CI-eNB/cmake_targets', '\$', 5)
+
+		mySSH.command('cd ' + lSourcePath + '/cmake_targets', '\$', 5)
+		mySSH.command('rm -f build_log_' + self.testCase_id + '.zip || true', '\$', 5)
 		if (os.path.isfile('./build_log_' + self.testCase_id + '.zip')):
 			os.remove('./build_log_' + self.testCase_id + '.zip')
+		if (os.path.isdir('./build_log_' + self.testCase_id)):
+			shutil.rmtree('./build_log_' + self.testCase_id)
 		mySSH.command('zip -r -qq build_log_' + self.testCase_id + '.zip build_log_' + self.testCase_id, '\$', 5)
 		mySSH.copyin(lIpAddr, lUserName, lPassWord, lSourcePath + '/cmake_targets/build_log_' + self.testCase_id + '.zip', '.')
-		#mySSH.command('rm -f build_log_' + self.testCase_id + '.zip','\$', 5)
+		mySSH.command('rm -f build_log_' + self.testCase_id + '.zip','\$', 5)
 		mySSH.close()
 		ZipFile('build_log_' + self.testCase_id + '.zip').extractall('.')
-	
+
 		#Trying to identify the errors and warnings for each built images
 		imageNames1 = imageNames
 		shared = ('ran-build','ran')
@@ -328,9 +332,16 @@ class Containerize():
 				files[fil] = errorandwarnings
 			self.collectInfo[image] = files
 		
-		logging.info('\u001B[1m Building OAI Image(s) Pass\u001B[0m')
-		HTML.CreateHtmlTestRow(self.imageKind, 'OK', CONST.ALL_PROCESSES_OK)
-		HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
+		if status:
+			logging.info('\u001B[1m Building OAI Image(s) Pass\u001B[0m')
+			HTML.CreateHtmlTestRow(self.imageKind, 'OK', CONST.ALL_PROCESSES_OK)
+			HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
+		else:
+			logging.error('\u001B[1m Building OAI Images Failed\u001B[0m')
+			HTML.CreateHtmlTestRow(self.imageKind, 'KO', CONST.ALL_PROCESSES_OK)
+			HTML.CreateHtmlNextTabHeaderTestRow(self.collectInfo, self.allImagesSize)
+			HTML.CreateHtmlTabFooter(False)
+			sys.exit(1)
 
 	def DeployObject(self, HTML, EPC):
 		if self.eNB_serverId[self.eNB_instance] == '0':
diff --git a/ci-scripts/docker_log_split.py b/ci-scripts/docker_log_split.py
new file mode 100644
index 0000000000000000000000000000000000000000..8c3c9e41dcac79f2b527fc980da803b7b33465fc
--- /dev/null
+++ b/ci-scripts/docker_log_split.py
@@ -0,0 +1,92 @@
+#/*
+# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+# * contributor license agreements.  See the NOTICE file distributed with
+# * this work for additional information regarding copyright ownership.
+# * The OpenAirInterface Software Alliance licenses this file to You under
+# * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+# * except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# *      http://www.openairinterface.org/?page_id=698
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# *-------------------------------------------------------------------------------
+# * For more information about the OpenAirInterface (OAI) Software Alliance:
+# *      contact@openairinterface.org
+# */
+#---------------------------------------------------------------------
+# Python for CI of OAI-eNB + COTS-UE
+#
+#   Required Python Version
+#     Python 3.x
+#
+#   Required Python Package
+#     pexpect
+#---------------------------------------------------------------------
+
+
+#-----------------------------------------------------------
+# Import Libs
+#-----------------------------------------------------------
+import sys		# arg
+import re		# reg
+import os
+import subprocess
+
+
+class SplitReport():
+	def __init__(self):
+		self.logfilename = ''
+		self.destinationFolder = ''
+
+	def split(self):
+		self.destinationFolder = self.logfilename.replace(".log","")
+		if os.path.isfile(self.logfilename):
+			newImageLog = open(self.logfilename + '.new', 'w')
+			copyFlag = True
+			with open(self.logfilename, 'r') as imageLog:
+				for line in imageLog:
+					header = False
+					ret = re.search('====== Start of log for ([0-9\.A-Za-z\-\_]+) ======', line)
+					if ret is not None:
+						copyFlag = False
+						header = True
+						detailedLogFile = open(self.destinationFolder + '/' + ret.group(1), 'w')
+					if copyFlag:
+						newImageLog.write(line)
+					ret = re.search('====== End of log for ([0-9\.A-Za-z\-\_]+) ======', line)
+					if ret is not None:
+						copyFlag = True
+						detailedLogFile.close()
+					elif not copyFlag and not header:
+						detailedLogFile.write(line)
+			imageLog.close()
+			newImageLog.close()
+			os.rename(self.logfilename + '.new', self.logfilename)
+		else:
+			print('Cannot split unfound file')
+
+#--------------------------------------------------------------------------------------------------------
+#
+# Start of main
+#
+#--------------------------------------------------------------------------------------------------------
+
+argvs = sys.argv
+argc = len(argvs)
+
+SP = SplitReport()
+
+while len(argvs) > 1:
+	myArgv = argvs.pop(1)
+	if re.match('^\-\-logfilename=(.+)$', myArgv, re.IGNORECASE):
+		matchReg = re.match('^\-\-logfilename=(.+)$', myArgv, re.IGNORECASE)
+		SP.logfilename = matchReg.group(1)
+
+SP.split()
+
+sys.exit(0)
diff --git a/ci-scripts/xml_files/fr1_image_build.xml b/ci-scripts/xml_files/container_image_build.xml
similarity index 93%
rename from ci-scripts/xml_files/fr1_image_build.xml
rename to ci-scripts/xml_files/container_image_build.xml
index 9dd386b646cd6e1975882e62f49852700efd5cf8..430729c49c79b8b09cca0cf109c1495ac44c12aa 100644
--- a/ci-scripts/xml_files/fr1_image_build.xml
+++ b/ci-scripts/xml_files/container_image_build.xml
@@ -22,7 +22,7 @@
 -->
 <testCaseList>
 	<htmlTabRef>build-tab</htmlTabRef>
-	<htmlTabName>Build</htmlTabName>
+	<htmlTabName>Build Container Images</htmlTabName>
 	<htmlTabIcon>wrench</htmlTabIcon>
 	<TestCaseRequestedList>
  000001
@@ -31,7 +31,7 @@
 
 	<testCase id="000001">
 		<class>Build_Image</class>
-		<desc>Build eNB Image</desc>
+		<desc>Build all Images</desc>
 		<kind>all</kind>
 		<eNB_instance>0</eNB_instance>
 		<eNB_serverId>0</eNB_serverId>
diff --git a/cmake_targets/build_oai b/cmake_targets/build_oai
index 960b24104773b4ce6816c6c90920389eb4b98b5c..dbcadba10353e4fa1955e8340c7a54c222989e45 100755
--- a/cmake_targets/build_oai
+++ b/cmake_targets/build_oai
@@ -50,6 +50,7 @@ REL="Rel15"
 HW="None"
 TP="None"
 EPC=0
+VERBOSE_CI=0
 VERBOSE_COMPILE=0
 CFLAGS_PROCESSOR_USER=""
 RUN_GROUP=0
@@ -324,6 +325,10 @@ function main() {
             HWLAT_TEST=1
             echo_info "Will compile hw latency test program"
             shift;;
+       --verbose-ci)
+	        VERBOSE_CI=1
+            echo_info "Will compile with verbose instructions in CI Docker env"
+            shift;;
        --verbose-compile)
 	        VERBOSE_COMPILE=1
             echo_info "Will compile with verbose instructions"
diff --git a/cmake_targets/tools/build_helper b/cmake_targets/tools/build_helper
index 977827b0c918116ddad1b510ada74f6f6c39c0c4..9b8363fc2b96f6e966418847029ba7926572ae4c 100755
--- a/cmake_targets/tools/build_helper
+++ b/cmake_targets/tools/build_helper
@@ -41,7 +41,7 @@ KERNEL_VERSION=$(uname -r | cut -d '.' -f1)
 KERNEL_MAJOR=$(uname -r | cut -d '.' -f2)
 
 #check if we run inside a container
-IS_CONTAINER=`egrep -c "docker|podman|kubepods" /proc/self/cgroup || true`
+IS_CONTAINER=`egrep -c "docker|podman|kubepods|libpod|buildah" /proc/self/cgroup || true`
 #sudo is not needed when we are root
 if [ "$UID" = 0 ]
 then
@@ -221,6 +221,11 @@ compilations() {
     ret=$?
   } > $dlog/$2.$REL.txt 2>&1
   set -e
+  if [ "$VERBOSE_CI" == "1" ]; then
+     echo_info "====== Start of log for $2.$REL.txt ======"
+     cat $dlog/$2.$REL.txt
+     echo_info "====== End of log for $2.$REL.txt ======"
+  fi
   if [[ $ret -ne 0 ]]; then
      check_warnings "$dlog/$2.$REL.txt"
      check_errors "$dlog/$2.$REL.txt"
@@ -359,10 +364,10 @@ check_install_usrp_uhd_driver(){
     elif [[ "$OS_BASEDISTRO" == "fedora" ]]; then
         if [ $IS_CONTAINER -eq 0 ]
         then
-            $SUDO $INSTALLER -y install python boost libusb-devel libusbx-devel boost-devel python-mako python-docutils cmake
+            $SUDO $INSTALLER -y install python boost libusb-devel libusbx-devel boost-devel python-mako python-docutils $CMAKE
             $SUDO -H pip install requests
         else
-            $SUDO $INSTALLER -y install boost boost-devel cmake3
+            $SUDO $INSTALLER -y install boost boost-devel $CMAKE
             $SUDO pip3 install mako requests
         fi
         if [[ "$OS_DISTRO" == "rhel" ]] || [[ "$OS_DISTRO" == "centos" ]]; then
@@ -488,7 +493,7 @@ install_soapy_from_source(){
     #git checkout tags/release_003_010_001_001
     mkdir -p build
     cd build
-    cmake ../
+    $CMAKE ../
     echo "Compiling SoapyRemote"
     make -j`nproc`
     $SUDO make install
@@ -507,7 +512,7 @@ install_soapy_iris_from_source(){
     cd sklk-soapyiris
     mkdir -p build
     cd build
-    cmake ../
+    $CMAKE ../
     echo "Compiling SoapyIris"
     make -j`nproc`
     $SUDO make install
@@ -684,7 +689,7 @@ check_install_oai_software() {
 	automake  \
 	bison  \
 	build-essential \
-	cmake \
+	$CMAKE \
 	cmake-curses-gui  \
         ninja-build \
 	doxygen \
diff --git a/docker/Dockerfile.eNB.rhel8.2 b/docker/Dockerfile.eNB.rhel8.2
index 5fcd5a7b9f07d041f66d3aa92dca8a3c72c57c83..aa8b9aa228962ed012e39d2bb21a02e0a62d2b8f 100644
--- a/docker/Dockerfile.eNB.rhel8.2
+++ b/docker/Dockerfile.eNB.rhel8.2
@@ -27,14 +27,15 @@
 
 FROM localhost/ran-build:latest AS enb-build 
 
+RUN rm -Rf /oai-ran
 WORKDIR /oai-ran
+COPY . .
 
 #run build_oai to build the target image
 RUN /bin/sh oaienv && \ 
     cd cmake_targets && \
-    rm -Rf log && \
     mkdir -p log && \
-    ./build_oai --eNB --ninja -w USRP
+    ./build_oai --eNB --ninja -w USRP --verbose-ci
 
 # debug
 #RUN ldconfig -v && ldd /oai-ran/targets/bin/lte-softmodem.Rel15
diff --git a/docker/Dockerfile.eNB.ubuntu18 b/docker/Dockerfile.eNB.ubuntu18
index cdfb37ebfc343637e2bfbdce2ff3d2f0f147a09c..95d7194c317e6c38fceefbfebfd93fc64e649ca0 100644
--- a/docker/Dockerfile.eNB.ubuntu18
+++ b/docker/Dockerfile.eNB.ubuntu18
@@ -27,14 +27,15 @@
 
 FROM ran-build:latest AS enb-build 
 
+RUN rm -Rf /oai-ran
 WORKDIR /oai-ran
+COPY . .
 
 #run build_oai to build the target image
 RUN /bin/sh oaienv && \ 
     cd cmake_targets && \
-    rm -Rf log && \
     mkdir -p log && \
-    ./build_oai --eNB --ninja -w USRP
+    ./build_oai --eNB --ninja -w USRP --verbose-ci
 
 RUN apt-get install -y python3-pip && \
     pip3 install --ignore-installed pyyaml && \
diff --git a/docker/Dockerfile.gNB.rhel8.2 b/docker/Dockerfile.gNB.rhel8.2
index fb9ade927f8db1a1c7d559b3ddb26db4cb6af8df..6969c691fdf6197dccb14cf8925bbda766f3581d 100644
--- a/docker/Dockerfile.gNB.rhel8.2
+++ b/docker/Dockerfile.gNB.rhel8.2
@@ -27,14 +27,15 @@
 
 FROM localhost/ran-build:latest AS gnb-build 
 
+RUN rm -Rf /oai-ran
 WORKDIR /oai-ran
+COPY . .
 
 #run build_oai to build the target image
 RUN /bin/sh oaienv && \ 
     cd cmake_targets && \
-    rm -Rf log && \
     mkdir -p log && \
-    ./build_oai --gNB --ninja -w USRP
+    ./build_oai --gNB --ninja -w USRP --verbose-ci
 
 #debug
 #RUN ldconfig -v
diff --git a/docker/Dockerfile.gNB.ubuntu18 b/docker/Dockerfile.gNB.ubuntu18
index d43ad4709dbedcef7c76861cc2677a38c95dc8d4..276d6989c4e9bf61d87cb4c86a32ff8c55ff7d4b 100644
--- a/docker/Dockerfile.gNB.ubuntu18
+++ b/docker/Dockerfile.gNB.ubuntu18
@@ -27,14 +27,15 @@
 
 FROM ran-build:latest AS gnb-build 
 
+RUN rm -Rf /oai-ran
 WORKDIR /oai-ran
+COPY . .
 
 #run build_oai to build the target image
 RUN /bin/sh oaienv && \ 
     cd cmake_targets && \
-    rm -Rf log && \
     mkdir -p log && \
-    ./build_oai --gNB --ninja -w USRP
+    ./build_oai --gNB --ninja -w USRP --verbose-ci
 
 #debug
 RUN ldconfig -v
diff --git a/docker/Dockerfile.lteUE.rhel8.2 b/docker/Dockerfile.lteUE.rhel8.2
index 0a853dcd6b5dc211a730cf0a82f074e9118eea61..e22343d63cec65d90e23051c44abb3da778094b3 100644
--- a/docker/Dockerfile.lteUE.rhel8.2
+++ b/docker/Dockerfile.lteUE.rhel8.2
@@ -27,15 +27,15 @@
 
 FROM localhost/ran-build:latest AS lte-ue-build 
 
-
+RUN rm -Rf /oai-ran
 WORKDIR /oai-ran
+COPY . .
 
 #run build_oai to build the target image
 RUN /bin/sh oaienv && \ 
     cd cmake_targets && \
-    rm -Rf log && \
     mkdir -p log && \
-    ./build_oai --UE --ninja -w USRP
+    ./build_oai --UE --ninja -w USRP --verbose-ci
 
 # debug
 #RUN ldconfig -v && ldd /oai-ran/targets/bin/lte-uesoftmodem.Rel15
diff --git a/docker/Dockerfile.lteUE.ubuntu18 b/docker/Dockerfile.lteUE.ubuntu18
index 8793cc54f79d87e470a46a4520b7f2ee519d1865..6af2e619918e776d0b46ec4b466d4677dacd627a 100644
--- a/docker/Dockerfile.lteUE.ubuntu18
+++ b/docker/Dockerfile.lteUE.ubuntu18
@@ -27,14 +27,15 @@
 
 FROM ran-build:latest AS lte-ue-build 
 
+RUN rm -Rf /oai-ran
 WORKDIR /oai-ran
+COPY . .
 
 #run build_oai to build the target image
 RUN /bin/sh oaienv && \ 
     cd cmake_targets && \
-    rm -Rf log && \
     mkdir -p log && \
-    ./build_oai --UE --ninja -w USRP
+    ./build_oai --UE --ninja -w USRP --verbose-ci
 
 # debug
 #RUN ldconfig -v
diff --git a/docker/Dockerfile.nrUE.rhel8.2 b/docker/Dockerfile.nrUE.rhel8.2
index f9f3f8e4bfcdfd4e95d3d8ae10ed206788f7af53..01988fd6ff6608d8f132eefbce4524c16149a305 100644
--- a/docker/Dockerfile.nrUE.rhel8.2
+++ b/docker/Dockerfile.nrUE.rhel8.2
@@ -27,14 +27,15 @@
 
 FROM localhost/ran-build:latest AS nr-ue-build 
 
+RUN rm -Rf /oai-ran
 WORKDIR /oai-ran
+COPY . .
 
 #run build_oai to build the target image
 RUN /bin/sh oaienv && \ 
     cd cmake_targets && \
-    rm -Rf log && \
     mkdir -p log && \
-    ./build_oai --nrUE --ninja -w USRP
+    ./build_oai --nrUE --ninja -w USRP --verbose-ci
 
 # debug
 #RUN ldconfig -v
diff --git a/docker/Dockerfile.nrUE.ubuntu18 b/docker/Dockerfile.nrUE.ubuntu18
index 5450194f3539ad83663017e31e53e10185dc9a37..c96bcf80ef106613c06080574455f09f724cb42a 100644
--- a/docker/Dockerfile.nrUE.ubuntu18
+++ b/docker/Dockerfile.nrUE.ubuntu18
@@ -27,14 +27,15 @@
 
 FROM ran-build:latest AS nr-ue-build 
 
+RUN rm -Rf /oai-ran
 WORKDIR /oai-ran
+COPY . .
 
 #run build_oai to build the target image
 RUN /bin/sh oaienv && \ 
     cd cmake_targets && \
-    rm -Rf log && \
     mkdir -p log && \
-    ./build_oai --nrUE --ninja -w USRP
+    ./build_oai --nrUE --ninja -w USRP --verbose-ci
 
 # debug
 #RUN ldconfig -v