testId2USERNAME2PASSWORD2|g' settingsFileId")
+ settingsFile == 'SETTINGS_FILE'
+ newSettingsFile == 'settingsFileId'
+ }
+
+}
diff --git a/jenkins-pipeline-shared-libraries/test/vars/MavenSpec.groovy b/jenkins-pipeline-shared-libraries/test/vars/MavenSpec.groovy
new file mode 100644
index 000000000..8e21c523a
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/test/vars/MavenSpec.groovy
@@ -0,0 +1,370 @@
+import com.homeaway.devtools.jenkins.testing.JenkinsPipelineSpecification
+import groovy.util.slurpersupport.GPathResult
+import groovy.util.slurpersupport.NodeChild
+import groovy.util.slurpersupport.NodeChildren
+
+class MavenSpec extends JenkinsPipelineSpecification {
+ def mavenGroovy = null
+
+ class VersionChildNode {
+ String version
+
+ VersionChildNode(String version) {
+ this.version = version;
+ }
+
+ def text() {
+ return this.version;
+ }
+ }
+
+ def setup() {
+ mavenGroovy = loadPipelineScriptForTest("vars/maven.groovy")
+ }
+
+ def "[maven.groovy] run Maven"() {
+ when:
+ mavenGroovy.runMaven("clean install")
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B clean install', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven with option"() {
+ when:
+ mavenGroovy.runMaven("clean install", ['-fae'])
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -fae clean install', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven with log file"() {
+ setup:
+ Properties props = new Properties()
+ props.setProperty("anykey", "anyvalue")
+ when:
+ mavenGroovy.runMaven("clean install", ['-fae'], props, "logFile.txt")
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -fae clean install -Danykey=anyvalue | tee $WORKSPACE/logFile.txt ; test ${PIPESTATUS[0]} -eq 0', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven with skip tests"() {
+ when:
+ mavenGroovy.runMaven("clean install", true, ['-fae'], "logFile.txt")
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -fae clean install -DskipTests=true | tee $WORKSPACE/logFile.txt ; test ${PIPESTATUS[0]} -eq 0', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven without skip tests"() {
+ when:
+ mavenGroovy.runMaven("clean install", false, ['-fae'], "logFile.txt")
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -fae clean install -DskipTests=false | tee $WORKSPACE/logFile.txt ; test ${PIPESTATUS[0]} -eq 0', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven without log file"() {
+ setup:
+ Properties props = new Properties()
+ props.setProperty("anykey", "anyvalue")
+ when:
+ mavenGroovy.runMaven("clean install", ['-fae'], props)
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -fae clean install -Danykey=anyvalue', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven with settings with log file"() {
+ setup:
+ mavenGroovy.getBinding().setVariable('MAVEN_SETTINGS_XML','settingsFileId')
+ Properties properties = new Properties()
+ properties.put('property1', 'value1')
+ when:
+ mavenGroovy.runMavenWithSettings("settings.xml", "clean install", properties, "logFile.txt")
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -s settingsFileId -fae clean install -Dproperty1=value1 | tee $WORKSPACE/logFile.txt ; test ${PIPESTATUS[0]} -eq 0', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven with settings without log file"() {
+ setup:
+ mavenGroovy.getBinding().setVariable('MAVEN_SETTINGS_XML','settingsFileId')
+ Properties properties = new Properties()
+ properties.put('property1b', 'value1b')
+ when:
+ mavenGroovy.runMavenWithSettings("settings.xml", "clean install", properties)
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -s settingsFileId -fae clean install -Dproperty1b=value1b', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven with settings without properties"() {
+ setup:
+ mavenGroovy.getBinding().setVariable('MAVEN_SETTINGS_XML', 'settingsFileId')
+ Properties properties = new Properties()
+ when:
+ mavenGroovy.runMavenWithSettings("settings.xml", "clean install", properties, "logFile.txt")
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -s settingsFileId -fae clean install | tee $WORKSPACE/logFile.txt ; test ${PIPESTATUS[0]} -eq 0', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven sonar settings with log file"() {
+ setup:
+ mavenGroovy.getBinding().setVariable('MAVEN_SETTINGS_XML', 'settingsFileId')
+ mavenGroovy.getBinding().setVariable("TOKEN", 'tokenId')
+ when:
+ mavenGroovy.runMavenWithSettingsSonar("settings.xml", "clean install", "sonarCloudId", "logFile.txt")
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -s settingsFileId clean install -Dsonar.login=tokenId | tee $WORKSPACE/logFile.txt ; test ${PIPESTATUS[0]} -eq 0', returnStdout: false])
+ }
+
+ def "[maven.groovy] run Maven sonar settings without log file"() {
+ setup:
+ mavenGroovy.getBinding().setVariable('MAVEN_SETTINGS_XML', 'settingsFileId')
+ mavenGroovy.getBinding().setVariable("TOKEN", 'tokenId')
+ when:
+ mavenGroovy.runMavenWithSettingsSonar("settings.xml", "clean install", "sonarCloudId")
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -s settingsFileId clean install -Dsonar.login=tokenId', returnStdout: false])
+ }
+
+ def "[maven.groovy] run with Settings with log file"() {
+ setup:
+ mavenGroovy.getBinding().setVariable('MAVEN_SETTINGS_XML', 'settingsFileId')
+ Properties properties = new Properties()
+ properties.put('skipTests', true)
+ when:
+ mavenGroovy.runMavenWithSettings("settings.xml", "clean install", true, "logFile.txt")
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -s settingsFileId -fae clean install -DskipTests=true | tee $WORKSPACE/logFile.txt ; test ${PIPESTATUS[0]} -eq 0', returnStdout: false])
+ }
+
+ def "[maven.groovy] run with Settings without log file"() {
+ setup:
+ mavenGroovy.getBinding().setVariable('MAVEN_SETTINGS_XML', 'settingsFileId')
+ when:
+ mavenGroovy.runMavenWithSettings("settings.xml", "clean install", false)
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -s settingsFileId -fae clean install -DskipTests=false', returnStdout: false])
+ }
+
+ def "[maven.groovy] run mvn versions set"() {
+ setup:
+ String newVersion = '3.6.2'
+ when:
+ mavenGroovy.mvnVersionsSet(newVersion)
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B -N -e versions:set -Dfull -DnewVersion=${newVersion} -DallowSnapshots=false -DgenerateBackupPoms=false", returnStdout: false])
+ }
+
+ def "[maven.groovy] run mvn versions set with allow snapshots"() {
+ setup:
+ String newVersion = '3.6.2'
+ when:
+ mavenGroovy.mvnVersionsSet(newVersion, true)
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B -N -e versions:set -Dfull -DnewVersion=${newVersion} -DallowSnapshots=true -DgenerateBackupPoms=false", returnStdout: false])
+ }
+
+ def "[maven.groovy] run mvn versions update parent"() {
+ setup:
+ String newVersion = '3.6.2'
+ when:
+ mavenGroovy.mvnVersionsUpdateParent(newVersion)
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B -N -e versions:update-parent -Dfull -DparentVersion=[${newVersion}] -DallowSnapshots=false -DgenerateBackupPoms=false", returnStdout: false])
+ }
+
+ def "[maven.groovy] run mvn versions update parent with allow snapshots"() {
+ setup:
+ String newVersion = '3.6.2'
+ when:
+ mavenGroovy.mvnVersionsUpdateParent(newVersion, true)
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B -N -e versions:update-parent -Dfull -DparentVersion=[${newVersion}] -DallowSnapshots=true -DgenerateBackupPoms=false", returnStdout: false])
+ }
+
+ def "[maven.groovy] run mvn versions update child modules"() {
+ when:
+ mavenGroovy.mvnVersionsUpdateChildModules()
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -N -e versions:update-child-modules -Dfull -DallowSnapshots=false -DgenerateBackupPoms=false', returnStdout: false])
+ }
+
+ def "[maven.groovy] run mvn versions update child modules with allow snapshots"() {
+ when:
+ mavenGroovy.mvnVersionsUpdateChildModules(true)
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -N -e versions:update-child-modules -Dfull -DallowSnapshots=true -DgenerateBackupPoms=false', returnStdout: false])
+ }
+
+ def "[maven.groovy] run mvn versions update parent and child modules"() {
+ setup:
+ String newVersion = '3.6.2'
+ when:
+ mavenGroovy.mvnVersionsUpdateParentAndChildModules(newVersion)
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B -N -e versions:update-parent -Dfull -DparentVersion=[${newVersion}] -DallowSnapshots=false -DgenerateBackupPoms=false", returnStdout: false])
+ 1 * getPipelineMock("sh")([script: 'mvn -B -N -e versions:update-child-modules -Dfull -DallowSnapshots=false -DgenerateBackupPoms=false', returnStdout: false])
+ }
+
+ def "[maven.groovy] run mvn versions update parent and child modules with allow snapshots"() {
+ setup:
+ String newVersion = '3.6.2'
+ when:
+ mavenGroovy.mvnVersionsUpdateParentAndChildModules(newVersion, true)
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B -N -e versions:update-parent -Dfull -DparentVersion=[${newVersion}] -DallowSnapshots=true -DgenerateBackupPoms=false", returnStdout: false])
+ 1 * getPipelineMock("sh")([script: 'mvn -B -N -e versions:update-child-modules -Dfull -DallowSnapshots=true -DgenerateBackupPoms=false', returnStdout: false])
+ }
+
+ def "[maven.groovy] run mvn get version property"() {
+ setup:
+ String propertyName = 'version.org.kie.kogito'
+ String expectedPropertyValue = 'some-property-value'
+ when:
+ def result = mavenGroovy.mvnGetVersionProperty(propertyName)
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B -q -f pom.xml help:evaluate -Dexpression=$propertyName -DforceStdout", returnStdout: true]) >> ' some-property-value '
+ expectedPropertyValue == result
+ }
+
+ def "[maven.groovy] run mvn get version property with custom pom"() {
+ setup:
+ String propertyName = 'version.org.kie.kogito'
+ String pomFile = 'path/to/pom.xml'
+ String expectedPropertyValue = 'some-property-value'
+ when:
+ def result = mavenGroovy.mvnGetVersionProperty(propertyName, pomFile)
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B -q -f $pomFile help:evaluate -Dexpression=$propertyName -DforceStdout", returnStdout: true]) >> ' some-property-value '
+ expectedPropertyValue == result
+ }
+
+ def "[maven.groovy] run mvn set version property"() {
+ setup:
+ String newVersion = '1.2.3'
+ String propertyName = 'version.org.kie.kogito'
+ when:
+ mavenGroovy.mvnSetVersionProperty(propertyName, newVersion)
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B -e versions:set-property -Dproperty=$propertyName -DnewVersion=$newVersion -DallowSnapshots=true -DgenerateBackupPoms=false", returnStdout: false])
+ }
+
+ def "[maven.groovy] uploadLocalArtifacts"() {
+ setup:
+ String mvnUploadCredsId = 'mvnUploadCredsId'
+ String mvnUploadCreds = 'user:password'
+ mavenGroovy.getBinding().setVariable('kieUnpack', mvnUploadCreds)
+ String artifactDir = '/tmp'
+ String repoUrl = 'https://redhat.com'
+ when:
+ mavenGroovy.uploadLocalArtifacts(mvnUploadCredsId, artifactDir, repoUrl)
+ then:
+ 1 * getPipelineMock('usernameColonPassword.call')([credentialsId: mvnUploadCredsId, variable: 'kieUnpack']) >> mvnUploadCreds
+ 1 * getPipelineMock('withCredentials')([mvnUploadCreds], _ as Closure)
+ 1 * getPipelineMock('dir')(artifactDir, _ as Closure)
+ 1 * getPipelineMock('sh')('zip -r artifacts .')
+ 1 * getPipelineMock('sh')("curl --silent --upload-file artifacts.zip -u ${mvnUploadCreds} -v ${repoUrl}")
+ }
+
+ def "[maven.groovy] getLatestArtifactVersionFromRepository OK"() {
+ setup:
+ String expectedVersion = '7.11.0.redhat-210426'
+ String repositoryUrl = 'http://repoUrl'
+ String groupId = 'org.kie.rhba'
+ String artifactId = 'rhdm'
+ def xmlSlurper = GroovySpy(XmlSlurper.class, global: true)
+ def gPathResult = Mock(GPathResult.class)
+ when:
+ def result = mavenGroovy.getLatestArtifactVersionFromRepository(repositoryUrl, groupId, artifactId)
+ then:
+ 1 * xmlSlurper.parse('http://repoUrl/org/kie/rhba/rhdm/maven-metadata.xml') >> gPathResult
+ 1 * gPathResult.getProperty('versioning') >> gPathResult
+ 1 * gPathResult.getProperty('latest') >> gPathResult
+ 1 * gPathResult.text() >> expectedVersion
+ expectedVersion == result
+ }
+
+ def "[maven.groovy] getLatestArtifactVersionFromRepository null"() {
+ setup:
+ String expectedVersion = null
+ String repositoryUrl = 'http://repoUrl'
+ String groupId = 'org.kie.rhba'
+ String artifactId = 'rhdm'
+ def xmlSlurper = GroovySpy(XmlSlurper.class, global: true)
+ def gPathResult = Mock(GPathResult.class)
+ when:
+ def result = mavenGroovy.getLatestArtifactVersionFromRepository(repositoryUrl, groupId, artifactId)
+ then:
+ 1 * xmlSlurper.parse('http://repoUrl/org/kie/rhba/rhdm/maven-metadata.xml') >> gPathResult
+ 1 * gPathResult.getProperty('versioning') >> null
+ expectedVersion == result
+ }
+
+ def "[maven.groovy] getLatestArtifactVersionPrefixFromRepository OK"() {
+ setup:
+ String expectedVersion = '7.52.0.Final-redhat-00004'
+ String repositoryUrl = 'http://repoUrl'
+ String groupId = 'org.kie'
+ String artifactId = 'kie-api'
+ def xmlSlurper = GroovySpy(XmlSlurper.class, global: true)
+ def gPathResult = Mock(GPathResult.class)
+ def versionIterator = [new VersionChildNode('7.52.0.Final'), new VersionChildNode('7.52.0.Final-redhat-00001'), new VersionChildNode('7.52.0.Final-redhat-00004'), new VersionChildNode('7.52.0.Final-redhat-00003'), new VersionChildNode('7.53.0.Final-redhat-00009')].iterator()
+ when:
+ def result = mavenGroovy.getLatestArtifactVersionPrefixFromRepository(repositoryUrl, groupId, artifactId, '7.52.0.Final-redhat')
+ then:
+ 1 * xmlSlurper.parse('http://repoUrl/org/kie/kie-api/maven-metadata.xml') >> gPathResult
+ 1 * gPathResult.getProperty('versioning') >> gPathResult
+ 1 * gPathResult.getProperty('versions') >> gPathResult
+ 1 * gPathResult.childNodes() >> versionIterator
+ expectedVersion == result
+ }
+
+ def "[maven.groovy] getLatestArtifactVersionPrefixFromRepository null"() {
+ setup:
+ String expectedVersion = null
+ String repositoryUrl = 'http://repoUrl'
+ String groupId = 'org.kie'
+ String artifactId = 'kie-api'
+ def xmlSlurper = GroovySpy(XmlSlurper.class, global: true)
+ def gPathResult = Mock(GPathResult.class)
+ def versionIterator = [new VersionChildNode('7.52.0.Final'), new VersionChildNode('7.52.0.Final-redhat-00001'), new VersionChildNode('7.52.0.Final-redhat-00004'), new VersionChildNode('7.52.0.Final-redhat-00003'), new VersionChildNode('7.53.0.Final-redhat-00009')].iterator()
+ when:
+ def result = mavenGroovy.getLatestArtifactVersionPrefixFromRepository(repositoryUrl, groupId, artifactId, '8.0.0.Final-redhat')
+ then:
+ 1 * xmlSlurper.parse('http://repoUrl/org/kie/kie-api/maven-metadata.xml') >> gPathResult
+ 1 * gPathResult.getProperty('versioning') >> gPathResult
+ 1 * gPathResult.getProperty('versions') >> gPathResult
+ 1 * gPathResult.childNodes() >> versionIterator
+ expectedVersion == result
+ }
+
+ def "[maven.groovy] getProjectPomFromBuildCmd command with -f"() {
+ setup:
+ String buildCmd = "mvn install -f scripts/logic/pom.xml -DskipTests"
+ when:
+ def result = mavenGroovy.getProjectPomFromBuildCmd(buildCmd)
+ then:
+ result == "scripts/logic/pom.xml"
+ }
+
+ def "[maven.groovy] getProjectPomFromBuildCmd command with --file="() {
+ setup:
+ String buildCmd = "mvn install --file=scripts/logic/pom.xml -DskipTests"
+ when:
+ def result = mavenGroovy.getProjectPomFromBuildCmd(buildCmd)
+ then:
+ result == "scripts/logic/pom.xml"
+ }
+
+ def "[maven.groovy] getProjectPomFromBuildCmd command without -f/--file="() {
+ setup:
+ String buildCmd = "mvn install -DskipTests"
+ when:
+ def result = mavenGroovy.getProjectPomFromBuildCmd(buildCmd)
+ then:
+ result == "pom.xml"
+ }
+
+ def "[maven.groovy] cleanRepository"() {
+ when:
+ mavenGroovy.cleanRepository()
+ then:
+ 1 * getPipelineMock('sh')('rm -rf $HOME/.m2/repository')
+ }
+}
diff --git a/jenkins-pipeline-shared-libraries/test/vars/MavenStagingHelperSpec.groovy b/jenkins-pipeline-shared-libraries/test/vars/MavenStagingHelperSpec.groovy
new file mode 100644
index 000000000..11e39bd34
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/test/vars/MavenStagingHelperSpec.groovy
@@ -0,0 +1,98 @@
+import com.homeaway.devtools.jenkins.testing.JenkinsPipelineSpecification
+import org.jenkinsci.plugins.workflow.steps.Step
+import org.jenkinsci.plugins.workflow.steps.StepContext
+import org.jenkinsci.plugins.workflow.steps.StepExecution
+import org.kie.jenkins.MavenStagingHelper
+
+class MavenStagingHelperSpec extends JenkinsPipelineSpecification {
+ def steps
+
+ def setup() {
+ steps = new Step() {
+ @Override
+ StepExecution start(StepContext stepContext) throws Exception {
+ return null
+ }
+ }
+ }
+
+ def "[MavenStagingHelper.groovy] stageLocalArtifacts"() {
+ setup:
+ def helper = new MavenStagingHelper(steps)
+ when:
+ helper.stageLocalArtifacts('ID', 'FOLDER')
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -q help:evaluate -Dexpression=project.artifactId -DforceStdout', returnStdout: true]) >> { return 'NAME' }
+ 1 * getPipelineMock("sh")([script: 'mvn -B -q help:evaluate -Dexpression=project.version -DforceStdout', returnStdout: true]) >> { return 'VS' }
+ 1 * getPipelineMock("sh")([script: "mvn -B --projects :NAME org.sonatype.plugins:nexus-staging-maven-plugin:1.6.13:deploy-staged-repository -DnexusUrl=https://repository.jboss.org/nexus -DserverId=jboss-releases-repository -DstagingDescription='NAME VS' -DkeepStagingRepositoryOnCloseRuleFailure=true -DstagingProgressTimeoutMinutes=10 -DrepositoryDirectory=FOLDER -DstagingProfileId=ID", returnStdout: false])
+ 1 * getPipelineMock("sh")([script: 'find FOLDER -name *.properties', returnStdout: true]) >> { return 'file.properties' }
+ 1 * getPipelineMock("readProperties")([file: 'file.properties']) >> { return ['stagingRepository.id':'STAGING_ID'] }
+ 'STAGING_ID' == helper.stagingRepositoryId
+ }
+
+ def "[MavenStagingHelper.groovy] stageLocalArtifacts empty stagingProfileId"() {
+ setup:
+ def helper = new MavenStagingHelper(steps)
+ when:
+ helper.stageLocalArtifacts('', 'FOLDER')
+ then:
+ thrown(AssertionError)
+ }
+
+ def "[MavenStagingHelper.groovy] stageLocalArtifacts empty artifacts folder"() {
+ setup:
+ def helper = new MavenStagingHelper(steps)
+ when:
+ helper.stageLocalArtifacts('ID', '')
+ then:
+ thrown(AssertionError)
+ }
+
+ def "[MavenStagingHelper.groovy] promoteStagingRepository"() {
+ setup:
+ def helper = new MavenStagingHelper(steps)
+ when:
+ helper.withStagingRepositoryId('STAGING_ID')
+ .promoteStagingRepository('ID')
+ then:
+ 1 * getPipelineMock("sh")([script: 'mvn -B -q help:evaluate -Dexpression=project.artifactId -DforceStdout', returnStdout: true]) >> { return 'NAME' }
+ 1 * getPipelineMock("sh")([script: 'mvn -B -q help:evaluate -Dexpression=project.version -DforceStdout', returnStdout: true]) >> { return 'VS' }
+ 1 * getPipelineMock("sh")([script: "mvn -B --projects :NAME org.sonatype.plugins:nexus-staging-maven-plugin:1.6.13:promote -DnexusUrl=https://repository.jboss.org/nexus -DserverId=jboss-releases-repository -DstagingDescription='NAME VS' -DbuildPromotionProfileId=ID -DstagingRepositoryId=STAGING_ID", returnStdout: false])
+ }
+
+ def "[MavenStagingHelper.groovy] promoteStagingRepository empty buildPromotionProfileId"() {
+ setup:
+ def helper = new MavenStagingHelper(steps)
+ when:
+ helper.withStagingRepositoryId('STAGING_ID')
+ .promoteStagingRepository('')
+ then:
+ thrown(AssertionError)
+ }
+
+ def "[MavenStagingHelper.groovy] promoteStagingRepository no stagingRepositoryId"() {
+ setup:
+ def helper = new MavenStagingHelper(steps)
+ when:
+ helper.promoteStagingRepository('ID')
+ then:
+ thrown(AssertionError)
+ }
+
+ def "[MavenStagingHelper.groovy] full process"() {
+ setup:
+ def helper = new MavenStagingHelper(steps)
+ .withNexusReleaseUrl('NEXUS_URL')
+ .withNexusReleaseRepositoryId('NEXUS_REPOSITORY_ID')
+ .withStagingDescription('DESCRIPTION')
+ getPipelineMock("sh")([script: 'mvn -B -q help:evaluate -Dexpression=project.artifactId -DforceStdout', returnStdout: true]) >> { return 'NAME' }
+ getPipelineMock("sh")([script: 'mvn -B -q help:evaluate -Dexpression=project.version -DforceStdout', returnStdout: true]) >> { return 'VS' }
+ getPipelineMock("sh")([script: 'find FOLDER -name *.properties', returnStdout: true]) >> { return 'file.properties' }
+ getPipelineMock("readProperties")([file: 'file.properties']) >> { return ['stagingRepository.id':'STAGING_ID'] }
+ helper.stageLocalArtifacts('STAGE_PROFILE_ID', 'FOLDER')
+ when:
+ helper.promoteStagingRepository('BUILD_PROMOTE_ID')
+ then:
+ 1 * getPipelineMock("sh")([script: "mvn -B --projects :NAME org.sonatype.plugins:nexus-staging-maven-plugin:1.6.13:promote -DnexusUrl=NEXUS_URL -DserverId=NEXUS_REPOSITORY_ID -DstagingDescription='DESCRIPTION' -DbuildPromotionProfileId=BUILD_PROMOTE_ID -DstagingRepositoryId=STAGING_ID", returnStdout: false])
+ }
+}
diff --git a/jenkins-pipeline-shared-libraries/test/vars/PullRequestSpec.groovy b/jenkins-pipeline-shared-libraries/test/vars/PullRequestSpec.groovy
new file mode 100644
index 000000000..2545e8e59
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/test/vars/PullRequestSpec.groovy
@@ -0,0 +1,35 @@
+import com.homeaway.devtools.jenkins.testing.JenkinsPipelineSpecification
+
+class PullRequestSpec extends JenkinsPipelineSpecification {
+ def groovyScript = null
+ def projectCollection = ['projectA', 'projectB', 'projectC']
+
+ def setup() {
+ groovyScript = loadPipelineScriptForTest("vars/pullrequest.groovy")
+ explicitlyMockPipelineVariable("out")
+ groovyScript.metaClass.WORKSPACE = '/'
+ }
+
+ def "PR from fork getAuthorAndRepoForPr" () {
+ setup:
+ def env = [:]
+ env['CHANGE_FORK']='contributor/fork'
+ env['CHANGE_URL']='https://github.com/owner/repo/pull/1'
+ groovyScript.getBinding().setVariable('env', env)
+ when:
+ def result = groovyScript.getAuthorAndRepoForPr()
+ then:
+ result == 'contributor/fork'
+ }
+
+ def "PR from origin getAuthorAndRepoForPr" () {
+ setup:
+ def env = [:]
+ env['CHANGE_URL']='https://github.com/owner/repo/pull/1'
+ groovyScript.getBinding().setVariable('env', env)
+ when:
+ def result = groovyScript.getAuthorAndRepoForPr()
+ then:
+ result == 'owner/repo'
+ }
+}
\ No newline at end of file
diff --git a/jenkins-pipeline-shared-libraries/test/vars/SSHShellSpec.groovy b/jenkins-pipeline-shared-libraries/test/vars/SSHShellSpec.groovy
new file mode 100644
index 000000000..8a2299f39
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/test/vars/SSHShellSpec.groovy
@@ -0,0 +1,128 @@
+import com.homeaway.devtools.jenkins.testing.JenkinsPipelineSpecification
+import org.jenkinsci.plugins.workflow.steps.Step
+import org.jenkinsci.plugins.workflow.steps.StepContext
+import org.jenkinsci.plugins.workflow.steps.StepExecution
+import org.kie.jenkins.shell.SSHShell
+import org.kie.jenkins.shell.installation.Installation
+
+class SSHShellSpec extends JenkinsPipelineSpecification {
+
+ def steps
+ def env = [:]
+
+ def setup() {
+ steps = new Step() {
+
+ @Override
+ StepExecution start(StepContext stepContext) throws Exception {
+ return null
+ }
+
+ }
+ }
+
+ def "[SSHShell.groovy] getFullCommand default"() {
+ setup:
+ def shell = new SSHShell(steps, 'SERVER')
+ when:
+ def result = shell.getFullCommand('whatever')
+ then:
+ result == "ssh SERVER \"whatever\""
+ }
+
+ def "[SSHShell.groovy] getFullCommand ssh options"() {
+ setup:
+ def shell = new SSHShell(steps, 'SERVER', 'SSH_OPTIONS')
+ when:
+ def result = shell.getFullCommand('whatever')
+ then:
+ result == "ssh SSH_OPTIONS SERVER \"whatever\""
+ }
+
+ def "[SSHShell.groovy] getFullCommand with installations"() {
+ setup:
+ def install1 = Mock(Installation)
+ def install2 = Mock(Installation)
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'mktemp -d']) >> 'TMP_FOLDER'
+ def shell = new SSHShell(steps, 'SERVER')
+ shell.install(install1)
+ shell.install(install2)
+ 1 * install1.getBinaryPaths() >> ['PATH1', 'PATH2']
+ 1 * install2.getBinaryPaths() >> ['PATH3']
+ 1 * install1.getExtraEnvVars() >> [:]
+ 1 * install2.getExtraEnvVars() >> [ install2key : 'install2value' ]
+ when:
+ def result = shell.getFullCommand('whatever')
+ then:
+ result == """ssh SERVER \"export PATH=\${PATH}:PATH1:PATH2:PATH3
+export install2key=install2value
+whatever\""""
+ }
+
+ def "[SSHShell.groovy] getFullCommand with environment variables"() {
+ setup:
+ def shell = new SSHShell(steps, 'SERVER')
+ shell.addEnvironmentVariable('KEY1', 'VALUE1')
+ shell.addEnvironmentVariable('key2', 'value2')
+ when:
+ def result = shell.getFullCommand('whatever')
+ then:
+ result == """ssh SERVER \"export KEY1=VALUE1
+export key2=value2
+whatever\""""
+ }
+
+ def "[SSHShell.groovy] getFullCommand with directory"() {
+ setup:
+ def shell = new SSHShell(steps, 'SERVER')
+ when:
+ def result = shell.getFullCommand('whatever', 'DIR')
+ then:
+ result == """ssh SERVER \"mkdir -p DIR && cd DIR
+whatever\""""
+ }
+
+ def "[SSHShell.groovy] getFullCommand with all"() {
+ setup:
+ def install1 = Mock(Installation)
+ def install2 = Mock(Installation)
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'mktemp -d']) >> 'TMP_FOLDER'
+ def shell = new SSHShell(steps, 'SERVER', 'SSH_OPTIONS')
+ shell.install(install1)
+ shell.install(install2)
+ shell.addEnvironmentVariable('KEY1', 'VALUE1')
+ shell.addEnvironmentVariable('key2', 'value2')
+ 1 * install1.getBinaryPaths() >> ['PATH1', 'PATH2']
+ 1 * install2.getBinaryPaths() >> ['PATH3']
+ 1 * install1.getExtraEnvVars() >> [:]
+ 1 * install2.getExtraEnvVars() >> [ install2key : 'install2value' ]
+ when:
+ def result = shell.getFullCommand('whatever', 'DIR')
+ then:
+ result == """ssh SSH_OPTIONS SERVER \"mkdir -p DIR && cd DIR
+export PATH=\${PATH}:PATH1:PATH2:PATH3
+export install2key=install2value
+export KEY1=VALUE1
+export key2=value2
+whatever\""""
+ }
+
+ def "[SSHShell.groovy] copyFilesFromRemote"() {
+ setup:
+ def shell = new SSHShell(steps, 'SERVER', 'SSH_OPTIONS')
+ when:
+ shell.copyFilesFromRemote('REMOTE', 'LOCAL')
+ then:
+ 1 * getPipelineMock('sh')("scp -r -p SSH_OPTIONS SERVER:REMOTE LOCAL")
+ }
+
+ def "[SSHShell.groovy] copyFilesToRemote"() {
+ setup:
+ def shell = new SSHShell(steps, 'SERVER', 'SSH_OPTIONS')
+ when:
+ shell.copyFilesToRemote('LOCAL', 'REMOTE')
+ then:
+ 1 * getPipelineMock('sh')("scp -r -p SSH_OPTIONS LOCAL SERVER:REMOTE")
+ }
+
+}
diff --git a/jenkins-pipeline-shared-libraries/test/vars/UtilSpec.groovy b/jenkins-pipeline-shared-libraries/test/vars/UtilSpec.groovy
new file mode 100644
index 000000000..f91bb290c
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/test/vars/UtilSpec.groovy
@@ -0,0 +1,1609 @@
+import com.homeaway.devtools.jenkins.testing.JenkinsPipelineSpecification
+import groovy.json.JsonSlurper
+import hudson.plugins.git.GitSCM
+
+class UtilSpec extends JenkinsPipelineSpecification {
+ def groovyScript = null
+ def projectBranchMappingProperties = null
+
+ def setup() {
+ groovyScript = loadPipelineScriptForTest("vars/util.groovy")
+ explicitlyMockPipelineVariable("out")
+ explicitlyMockPipelineVariable("KEYTAB_FILE")
+ }
+
+ def "[util.groovy] getProject"() {
+ when:
+ def project = groovyScript.getProject('https://github.com/apache/incubator-kie-kogito-pipelines.git')
+ then:
+ project == 'apache/incubator-kie-kogito-pipelines'
+ }
+
+ def "[util.groovy] getGroup"() {
+ when:
+ def group = groovyScript.getGroup('https://github.com/apache/incubator-kie-kogito-pipelines.git')
+ then:
+ group == 'apache'
+ }
+
+ def "[util.groovy] getProjectGroupName with group"() {
+ when:
+ def groupName = groovyScript.getProjectGroupName('name', 'group')
+ then:
+ groupName[0] == 'group'
+ groupName[1] == 'name'
+ }
+
+ def "[util.groovy] getProjectGroupName without group"() {
+ when:
+ def groupName = groovyScript.getProjectGroupName('name')
+ then:
+ groupName[0] == 'apache'
+ groupName[1] == 'name'
+ }
+
+ def "[util.groovy] storeGitInformation no previous values"() {
+ setup:
+ def projectGroupName = ['group', 'name']
+ def env = [:]
+ groovyScript.getBinding().setVariable("env", env)
+ when:
+ groovyScript.storeGitInformation('projectName')
+ then:
+ 1 * getPipelineMock('githubscm.getCommit')() >> 'kiegroup/lienzo-core: 0f917d4 Expose zoom and pan filters (#102)'
+ 1 * getPipelineMock('githubscm.getCommitHash')() >> 'ac36137f12d1bcfa5cdf02b796a1a33d251b48e1'
+ 1 * getPipelineMock('githubscm.getBranch')() >> '* (detached from 0f917d4) remotes/origin/main'
+ 1 * getPipelineMock('githubscm.getRemoteInfo')('origin', 'url') >> 'https://github.com/kiegroup/lienzo-core.git'
+ env['GIT_INFORMATION_REPORT'] == "projectName=kiegroup/lienzo-core: 0f917d4 Expose zoom and pan filters (#102) Branch [* (detached from 0f917d4) remotes/origin/main] Remote [https://github.com/kiegroup/lienzo-core.git]"
+ env['GIT_INFORMATION_HASHES'] == "projectName=ac36137f12d1bcfa5cdf02b796a1a33d251b48e1"
+ }
+
+ def "[util.groovy] storeGitInformation with previous values"() {
+ setup:
+ def projectGroupName = ['group', 'name']
+ def env = [:]
+ env['GIT_INFORMATION_REPORT'] = 'projectName=kiegroup/lienzo-tests: 45c16e1 Fix tests (#84) Branch [* (detached from 45c16e1) remotes/origin/main] Remote [https://github.com/kiegroup/lienzo-tests.git]'
+ env['GIT_INFORMATION_HASHES'] = 'projectName=45c16e1'
+ groovyScript.getBinding().setVariable("env", env)
+ when:
+ groovyScript.storeGitInformation('projectName')
+ then:
+ 1 * getPipelineMock('githubscm.getCommit')() >> 'kiegroup/lienzo-core: 0f917d4 Expose zoom and pan filters (#102)'
+ 1 * getPipelineMock('githubscm.getCommitHash')() >> '11111111111111111111111111111111'
+ 1 * getPipelineMock('githubscm.getBranch')() >> '* (detached from 0f917d4) remotes/origin/main'
+ 1 * getPipelineMock('githubscm.getRemoteInfo')('origin', 'url') >> 'https://github.com/kiegroup/lienzo-core.git'
+ env['GIT_INFORMATION_REPORT'] == 'projectName=kiegroup/lienzo-tests: 45c16e1 Fix tests (#84) Branch [* (detached from 45c16e1) remotes/origin/main] Remote [https://github.com/kiegroup/lienzo-tests.git]; projectName=kiegroup/lienzo-core: 0f917d4 Expose zoom and pan filters (#102) Branch [* (detached from 0f917d4) remotes/origin/main] Remote [https://github.com/kiegroup/lienzo-core.git]'
+ env['GIT_INFORMATION_HASHES'] == 'projectName=45c16e1;projectName=11111111111111111111111111111111'
+ }
+
+ def "[util.groovy] printGitInformationReport GIT_INFORMATION_REPORT null"() {
+ setup:
+ def projectGroupName = ['group', 'name']
+ def env = [:]
+ groovyScript.getBinding().setVariable("env", env)
+ when:
+ groovyScript.printGitInformationReport()
+ then:
+ true
+ }
+
+ def "[util.groovy] getProjectDirPath without group"() {
+ setup:
+ def projectGroupName = ['group', 'name']
+ def env = [:]
+ env.put('WORKSPACE', '/workspacefolder')
+ groovyScript.getBinding().setVariable("env", env)
+ when:
+ def result = groovyScript.getProjectDirPath('projectA')
+ then:
+ result == "/workspacefolder/apache_projectA"
+ }
+
+ def "[util.groovy] getNextVersionMicro"() {
+ when:
+ def snapshotVersion = groovyScript.getNextVersion('0.12.0', 'micro')
+ then:
+ '0.12.1-SNAPSHOT' == snapshotVersion
+
+ }
+
+ def "[util.groovy] getNextVersionMinor"() {
+ when:
+ def snapshotVersion = groovyScript.getNextVersion('0.12.1', 'minor')
+ then:
+ '0.13.0-SNAPSHOT' == snapshotVersion
+ }
+
+ def "[util.groovy] getNextVersionMinor no resetSubVersions"() {
+ when:
+ def snapshotVersion = groovyScript.getNextVersion('0.12.1', 'minor', 'SNAPSHOT', false)
+ then:
+ '0.13.1-SNAPSHOT' == snapshotVersion
+ }
+
+ def "[util.groovy] getNextVersionMajor"() {
+ when:
+ def snapshotVersion = groovyScript.getNextVersion('0.12.1', 'major')
+ then:
+ '1.0.0-SNAPSHOT' == snapshotVersion
+ }
+
+ def "[util.groovy] getNextVersionMajor no resetSubVersions"() {
+ when:
+ def snapshotVersion = groovyScript.getNextVersion('0.12.1', 'major', 'SNAPSHOT', false)
+ then:
+ '1.12.1-SNAPSHOT' == snapshotVersion
+ }
+
+ def "[util.groovy] getNextVersionSuffixTest"() {
+ when:
+ def snapshotVersion = groovyScript.getNextVersion('0.12.1', 'major', 'whatever')
+ then:
+ '1.0.0-whatever' == snapshotVersion
+ }
+
+ def "[util.groovy] getNextVersionSuffixTest no resetSubVersions"() {
+ when:
+ def snapshotVersion = groovyScript.getNextVersion('0.12.1', 'major', 'whatever', false)
+ then:
+ '1.12.1-whatever' == snapshotVersion
+ }
+
+ def "[util.groovy] getNextVersionErrorContainsAlphabets"() {
+ when:
+ def checkForAlphabets = groovyScript.getNextVersion('a.12.0', 'micro')
+ then:
+ 1 * getPipelineMock("error").call('Version a.12.0 is not in the required format. The major, minor, and micro parts should contain only numeric characters.')
+ }
+
+ def "[util.groovy] getNextVersionErrorFormat"() {
+ when:
+ def checkForFormatError = groovyScript.getNextVersion('0.12.0.1', 'micro')
+ then:
+ 1 * getPipelineMock("error").call('Version 0.12.0.1 is not in the required format X.Y.Z or X.Y.Z.suffix.')
+ }
+
+ def "[util.groovy] getNextVersion null"() {
+ when:
+ def version = groovyScript.getNextVersion('0.12.0', 'micro', null)
+ then:
+ '0.12.1' == version
+ }
+
+ def "[util.groovy] getNextVersionAssertErrorCheck"() {
+ when:
+ groovyScript.getNextVersion('0.12.0', 'microo')
+ then:
+ thrown(AssertionError)
+ }
+
+ def "[util.groovy] parseVersion correct"() {
+ when:
+ def version = groovyScript.parseVersion('0.12.6598')
+ then:
+ version[0] == 0
+ version[1] == 12
+ version[2] == 6598
+ }
+
+ def "[util.groovy] parseVersion With Suffix Correct"() {
+ when:
+ def version = groovyScript.parseVersion('1.0.0.Final')
+ then:
+ version[0] == 1
+ version[1] == 0
+ version[2] == 0
+ }
+
+ def "[util.groovy] parseVersion Error Contains Alphabets"() {
+ when:
+ groovyScript.parseVersion('a.12.0')
+ then:
+ 1 * getPipelineMock("error").call('Version a.12.0 is not in the required format. The major, minor, and micro parts should contain only numeric characters.')
+ }
+
+ def "[util.groovy] parseVersion Error Format"() {
+ when:
+ groovyScript.parseVersion('0.12.0.1')
+ then:
+ 1 * getPipelineMock("error").call('Version 0.12.0.1 is not in the required format X.Y.Z or X.Y.Z.suffix.')
+ }
+
+ def "[util.groovy] getMajorMinorVersion with correct version"() {
+ when:
+ def result = groovyScript.getMajorMinorVersion('1.36.0')
+ then:
+ result == "1.36"
+ }
+
+ def "[util.groovy] getMajorMinorVersion with incorrect version raises error"() {
+ when:
+ groovyScript.getMajorMinorVersion('ANY_VERSION')
+ then:
+ thrown(Exception)
+ }
+
+
+ def "[util.groovy] getReleaseBranchFromVersion"() {
+ when:
+ def output = groovyScript.getReleaseBranchFromVersion('1.50.425.Final')
+ then:
+ output == '1.50.x'
+ }
+
+ def "[util.groovy] calculateTargetReleaseBranch default"() {
+ when:
+ def version = groovyScript.calculateTargetReleaseBranch('56.34.x')
+ then:
+ version == '56.34.x'
+ }
+
+ def "[util.groovy] calculateTargetReleaseBranch not release branch"() {
+ when:
+ def version = groovyScript.calculateTargetReleaseBranch('anything')
+ then:
+ version == 'anything'
+ }
+
+ def "[util.groovy] calculateTargetReleaseBranch addMajor"() {
+ when:
+ def version = groovyScript.calculateTargetReleaseBranch('56.34.x', 10)
+ then:
+ version == '66.34.x'
+ }
+
+ def "[util.groovy] calculateTargetReleaseBranch addMinor"() {
+ when:
+ def version = groovyScript.calculateTargetReleaseBranch('56.34.x', 0, 15)
+ then:
+ version == '56.49.x'
+ }
+
+ def "[util.groovy] calculateTargetReleaseBranch addMajor addMinor"() {
+ when:
+ def version = groovyScript.calculateTargetReleaseBranch('56.34.x', 10, 15)
+ then:
+ version == '66.49.x'
+ }
+
+ def "[util.groovy] calculateTargetReleaseBranch prod branch"() {
+ when:
+ def version = groovyScript.calculateTargetReleaseBranch('56.34.x-prod')
+ then:
+ version == '56.34.x-prod'
+ }
+
+ def "[util.groovy] calculateTargetReleaseBranch prod branch addMajor"() {
+ when:
+ def version = groovyScript.calculateTargetReleaseBranch('56.34.x-prod', 7)
+ then:
+ version == '63.34.x-prod'
+ }
+
+ def "[util.groovy] calculateTargetReleaseBranch prod branch addMajor addMinor"() {
+ when:
+ def version = groovyScript.calculateTargetReleaseBranch('56.34.x-prod', 7, 10)
+ then:
+ version == '63.44.x-prod'
+ }
+
+ def "[util.groovy] calculateTargetReleaseBranch not release branch with -prod"() {
+ when:
+ def version = groovyScript.calculateTargetReleaseBranch('56.34.x-prod-anything', 7)
+ then:
+ version == '56.34.x-prod-anything'
+ }
+
+ def "[util.groovy] generateHashSize9"() {
+ when:
+ def hash9 = groovyScript.generateHash(9)
+ then:
+ hash9.length() == 9
+ }
+
+ def "[util.groovy] generateHashSize1000"() {
+ when:
+ def hash1000 = groovyScript.generateHash(1000)
+ then:
+ hash1000.length() == 1000
+ }
+
+ def "[util.groovy] generateTempFile"() {
+ when:
+ def result = groovyScript.generateTempFile()
+ then:
+ 1 * getPipelineMock("sh")([returnStdout: true, script: 'mktemp']) >> 'file'
+ result == 'file'
+ }
+
+ def "[util.groovy] generateTempFolder"() {
+ when:
+ def result = groovyScript.generateTempFolder()
+ then:
+ 1 * getPipelineMock("sh")([returnStdout: true, script: 'mktemp -d']) >> 'folder'
+ result == 'folder'
+ }
+
+ def "[util.groovy] executeWithCredentialsMap with token"() {
+ when:
+ groovyScript.executeWithCredentialsMap([token: 'TOKEN']) {
+ sh 'hello'
+ }
+ then:
+ 1 * getPipelineMock('string.call')([credentialsId: 'TOKEN', variable: 'QUAY_TOKEN']) >> 'token'
+ 1 * getPipelineMock('withCredentials')(['token'], _ as Closure)
+ 1 * getPipelineMock("sh")('hello')
+ 0 * getPipelineMock('error').call('No credentials given to execute the given closure')
+ }
+
+ def "[util.groovy] executeWithCredentialsMap with usernamePassword"() {
+ when:
+ groovyScript.executeWithCredentialsMap([usernamePassword: 'USERNAME_PASSWORD']) {
+ sh 'hello'
+ }
+ then:
+ 1 * getPipelineMock('usernamePassword.call')([credentialsId: 'USERNAME_PASSWORD', usernameVariable: 'QUAY_USER', passwordVariable: 'QUAY_TOKEN']) >> 'usernamePassword'
+ 1 * getPipelineMock('withCredentials')(['usernamePassword'], _ as Closure)
+ 1 * getPipelineMock("sh")('hello')
+ 0 * getPipelineMock('error').call('No credentials given to execute the given closure')
+ }
+
+ def "[util.groovy] executeWithCredentialsMap with token and usernamePassword"() {
+ when:
+ groovyScript.executeWithCredentialsMap([token: 'TOKEN', usernamePassword: 'USERNAME_PASSWORD']) {
+ sh 'hello'
+ }
+ then:
+ 1 * getPipelineMock('string.call')([credentialsId: 'TOKEN', variable: 'QUAY_TOKEN']) >> 'token'
+ 1 * getPipelineMock('withCredentials')(['token'], _ as Closure)
+ 1 * getPipelineMock("sh")('hello')
+ 0 * getPipelineMock('error').call('No credentials given to execute the given closure')
+ }
+
+ def "[util.groovy] executeWithCredentialsMap empty"() {
+ when:
+ groovyScript.executeWithCredentialsMap([:]) {
+ sh 'hello'
+ }
+ then:
+ 0 * getPipelineMock('string.call')([credentialsId: 'TOKEN', variable: 'QUAY_TOKEN']) >> 'token'
+ 0 * getPipelineMock('withCredentials')(['token'], _ as Closure)
+ 0 * getPipelineMock('usernamePassword.call')([credentialsId: 'USERNAME_PASSWORD', usernameVariable: 'QUAY_USER', passwordVariable: 'QUAY_TOKEN']) >> 'usernamePassword'
+ 0 * getPipelineMock('withCredentials')(['usernamePassword'], _ as Closure)
+ 0 * getPipelineMock("sh")('hello')
+ 1 * getPipelineMock('error').call('No credentials given to execute the given closure')
+ }
+
+ def "[util.groovy] cleanNode"() {
+ when:
+ groovyScript.cleanNode()
+ then:
+ 0 * getPipelineMock('cloud.cleanContainersAndImages')(_)
+ 1 * getPipelineMock('maven.cleanRepository')()
+ 1 * getPipelineMock('cleanWs.call')()
+ }
+
+ def "[util.groovy] cleanNode with docker"() {
+ when:
+ groovyScript.cleanNode('docker')
+ then:
+ 1 * getPipelineMock('cloud.cleanContainersAndImages')('docker')
+ 1 * getPipelineMock('maven.cleanRepository')()
+ 1 * getPipelineMock('cleanWs.call')()
+ }
+
+ def "[util.groovy] cleanNode with podman"() {
+ when:
+ groovyScript.cleanNode('podman')
+ then:
+ 1 * getPipelineMock('cloud.cleanContainersAndImages')('podman')
+ 1 * getPipelineMock('maven.cleanRepository')()
+ 1 * getPipelineMock('cleanWs.call')()
+ }
+
+ def "[util.groovy] replaceInAllFilesRecursive"() {
+ when:
+ groovyScript.replaceInAllFilesRecursive('pattern*', 'sedpatternval\\', 'newValue')
+ then:
+ 1 * getPipelineMock('sh')('find . -name \'pattern*\' -type f -exec sed -i \'s/sedpatternval\\/newValue/g\' {} \\;')
+ }
+
+ def "[util.groovy] rmPartialDeps"() {
+ setup:
+ def env = [:]
+ env.put('WORKSPACE', '/workspacefolderrmPartialDeps')
+ groovyScript.getBinding().setVariable("env", env)
+
+ when:
+ groovyScript.rmPartialDeps()
+ then:
+ 1 * getPipelineMock('dir')('/workspacefolderrmPartialDeps/.m2', _ as Closure)
+ 1 * getPipelineMock("sh").call('find . -regex ".*\\.part\\(\\.lock\\)?" -exec rm -rf {} \\;')
+ }
+
+
+
+ def "[util.groovy] retrieveConsoleLog no arg"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.retrieveConsoleLog()
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 100']) >> 'CONTENT'
+ result == 'CONTENT'
+ }
+
+ def "[util.groovy] retrieveConsoleLog with number of lines"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.retrieveConsoleLog(3)
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 3']) >> 'CONTENT'
+ result == 'CONTENT'
+ }
+
+ def "[util.groovy] retrieveConsoleLog with number of lines and build url"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.retrieveConsoleLog(2, "BUILD_URL/")
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/consoleText | tail -n 2']) >> 'CONTENT'
+ result == 'CONTENT'
+ }
+
+ def "[util.groovy] archiveConsoleLog no arg"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.archiveConsoleLog()
+ then:
+ 1 * getPipelineMock('sh')('rm -rf console.log')
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 100']) >> 'CONTENT'
+ 1 * getPipelineMock('writeFile')([text: 'CONTENT', file: 'console.log'])
+ 1 * getPipelineMock('archiveArtifacts.call')([artifacts: 'console.log'])
+ }
+
+ def "[util.groovy] archiveConsoleLog with id"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.archiveConsoleLog('ID', 3)
+ then:
+ 1 * getPipelineMock('sh')('rm -rf ID_console.log')
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 3']) >> 'CONTENT'
+ 1 * getPipelineMock('writeFile')([text: 'CONTENT', file: 'ID_console.log'])
+ 1 * getPipelineMock('archiveArtifacts.call')([artifacts: 'ID_console.log'])
+ }
+
+ def "[util.groovy] archiveConsoleLog with id and number of lines"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ groovyScript.metaClass.generateHash = { int size ->
+ return 'GENERATED_ID'
+ }
+ when:
+ def result = groovyScript.archiveConsoleLog('ID', 3)
+ then:
+ 1 * getPipelineMock('sh')('rm -rf ID_console.log')
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 3']) >> 'CONTENT'
+ 1 * getPipelineMock('writeFile')([text: 'CONTENT', file: 'ID_console.log'])
+ 1 * getPipelineMock('archiveArtifacts.call')([artifacts: 'ID_console.log'])
+ }
+
+ def "[util.groovy] archiveConsoleLog with id, number of lines and build url"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ groovyScript.metaClass.generateHash = { int size ->
+ return 'GENERATED_ID'
+ }
+ when:
+ def result = groovyScript.archiveConsoleLog('ID', 3, 'BUILD_URL/')
+ then:
+ 1 * getPipelineMock('sh')('rm -rf ID_console.log')
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/consoleText | tail -n 3']) >> 'CONTENT'
+ 1 * getPipelineMock('writeFile')([text: 'CONTENT', file: 'ID_console.log'])
+ 1 * getPipelineMock('archiveArtifacts.call')([artifacts: 'ID_console.log'])
+ }
+
+ def "[util.groovy] retrieveTestResults no arg"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.retrieveTestResults()
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> [ hello : 'anything' ]
+ result.hello == 'anything'
+ }
+
+ def "[util.groovy] retrieveTestResults with build url"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.retrieveTestResults("BUILD_URL/")
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/testReport/api/json?depth=1']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> [ hello : 'anything' ]
+ result.hello == 'anything'
+ }
+
+ def "[util.groovy] retrieveFailedTests no arg"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ def failedTests = [
+ suites: [
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package1.class1',
+ name: 'test',
+ errorDetails: 'details package1.class1.test1',
+ errorStackTrace: 'trace package1.class1.test1'
+ ],
+ [
+ status: 'SKIPPED',
+ className: 'package1.class2.',
+ name: 'test'
+ ]
+ ]
+ ],
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package2.class1',
+ name: '(?)',
+ errorDetails: 'details package2.class1.(?)',
+ errorStackTrace: 'trace package2.class1.(?)'
+ ],
+ [
+ status: 'PASSED',
+ className: 'package2.class2',
+ name: 'test'
+ ],
+ [
+ status: 'REGRESSION',
+ className: 'package2.class2',
+ name: 'test2',
+ errorDetails: 'details package2.class2.test2',
+ errorStackTrace: 'trace package2.class2.test2'
+ ],
+ ]
+ ]
+ ]
+ ]
+ when:
+ def result = groovyScript.retrieveFailedTests()
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> failedTests
+ result.size() == 3
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}.fullName == 'package1.class1.test'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}.url == 'URL/testReport/package1/class1/test/'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}.details == 'details package1.class1.test1'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}.stacktrace == 'trace package1.class1.test1'
+
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}.fullName == 'package2.class1.(?)'
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}.url == 'URL/testReport/package2/class1/___/'
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}.details == 'details package2.class1.(?)'
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}.stacktrace == 'trace package2.class1.(?)'
+
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}.fullName == 'package2.class2.test2'
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}.url == 'URL/testReport/package2/class2/test2/'
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}.details == 'details package2.class2.test2'
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}.stacktrace == 'trace package2.class2.test2'
+ }
+
+ def "[util.groovy] retrieveFailedTests with build url"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ def failedTests = [
+ suites: [
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package1.class1',
+ name: 'test',
+ errorDetails: 'details package1.class1.test1',
+ errorStackTrace: 'trace package1.class1.test1'
+ ],
+ [
+ status: 'SKIPPED',
+ className: 'package1.class2.',
+ name: 'test'
+ ]
+ ]
+ ],
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package2.class1',
+ name: '(?)',
+ errorDetails: 'details package2.class1.(?)',
+ errorStackTrace: 'trace package2.class1.(?)'
+ ],
+ [
+ status: 'PASSED',
+ className: 'package2.class2',
+ name: 'test'
+ ],
+ [
+ status: 'REGRESSION',
+ className: 'package2.class2',
+ name: 'test2',
+ errorDetails: 'details package2.class2.test2',
+ errorStackTrace: 'trace package2.class2.test2'
+ ],
+ ]
+ ]
+ ]
+ ]
+ when:
+ def result = groovyScript.retrieveFailedTests('BUILD_URL/')
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/testReport/api/json?depth=1']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> failedTests
+ result.size() == 3
+
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}.fullName == 'package1.class1.test'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}.url == 'BUILD_URL/testReport/package1/class1/test/'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}.details == 'details package1.class1.test1'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test'}.stacktrace == 'trace package1.class1.test1'
+
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}.fullName == 'package2.class1.(?)'
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}.url == 'BUILD_URL/testReport/package2/class1/___/'
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}.details == 'details package2.class1.(?)'
+ result.find { it.packageName == 'package2' && it.className == 'class1' && it.name == '(?)'}.stacktrace == 'trace package2.class1.(?)'
+
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}.fullName == 'package2.class2.test2'
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}.url == 'BUILD_URL/testReport/package2/class2/test2/'
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}.details == 'details package2.class2.test2'
+ result.find { it.packageName == 'package2' && it.className == 'class2' && it.name == 'test2'}.stacktrace == 'trace package2.class2.test2'
+ }
+
+ def "[util.groovy] retrieveFailedTests with multiple test cases with same name"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ def failedTests = [
+ suites: [
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package1.class1',
+ name: 'test',
+ errorDetails: 'details package1.class1.test1',
+ errorStackTrace: 'trace package1.class1.test1'
+ ],
+ [
+ status: 'SKIPPED',
+ className: 'package1.class2.',
+ name: 'test'
+ ]
+ ],
+ enclosingBlockNames : [
+ 'Test kogito-runtime-jvm',
+ 'Build&Test kogito-runtime-jvm',
+ 'Build & Test Images'
+ ]
+ ],
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package1.class1',
+ name: 'test',
+ errorDetails: 'details package1.class1.test1',
+ errorStackTrace: 'trace package1.class1.test1'
+ ],
+ [
+ status: 'SKIPPED',
+ className: 'package1.class2.',
+ name: 'test'
+ ]
+ ],
+ enclosingBlockNames : [
+ 'Test kogito-runtime-native',
+ 'Build&Test kogito-runtime-native',
+ 'Build & Test Images'
+ ]
+ ]
+ ]
+ ]
+ when:
+ def result = groovyScript.retrieveFailedTests()
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> failedTests
+ result.size() == 2
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}.fullName == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm / package1.class1.test'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}.url == 'URL/testReport/package1/class1/Build___Test_Images___Build_Test_kogito_runtime_jvm___Test_kogito_runtime_jvm___test/'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}.details == 'details package1.class1.test1'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}.stacktrace == 'trace package1.class1.test1'
+
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-native / Test kogito-runtime-native'}
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-native / Test kogito-runtime-native'}.fullName == 'Build & Test Images / Build&Test kogito-runtime-native / Test kogito-runtime-native / package1.class1.test'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-native / Test kogito-runtime-native'}.url == 'URL/testReport/package1/class1/Build___Test_Images___Build_Test_kogito_runtime_native___Test_kogito_runtime_native___test/'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-native / Test kogito-runtime-native'}.details == 'details package1.class1.test1'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-native / Test kogito-runtime-native'}.stacktrace == 'trace package1.class1.test1'
+ }
+
+ def "[util.groovy] retrieveFailedTests with enclosingBlockNames"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ def failedTests = [
+ suites: [
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package1.class1',
+ name: 'test',
+ errorDetails: 'details package1.class1.test1',
+ errorStackTrace: 'trace package1.class1.test1'
+ ],
+ [
+ status: 'SKIPPED',
+ className: 'package1.class2.',
+ name: 'test'
+ ]
+ ],
+ enclosingBlockNames : [
+ 'Test kogito-runtime-jvm',
+ 'Build&Test kogito-runtime-jvm',
+ 'Build & Test Images'
+ ]
+ ]
+ ]
+ ]
+ when:
+ def result = groovyScript.retrieveFailedTests()
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> failedTests
+ result.size() == 1
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}.fullName == 'package1.class1.test'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}.url == 'URL/testReport/package1/class1/test/'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}.details == 'details package1.class1.test1'
+ result.find { it.packageName == 'package1' && it.className == 'class1' && it.name == 'test' && it.enclosingBlockNames == 'Build & Test Images / Build&Test kogito-runtime-jvm / Test kogito-runtime-jvm'}.stacktrace == 'trace package1.class1.test1'
+ }
+
+ def "[util.groovy] retrieveArtifact default file exists"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.retrieveArtifact('PATH')
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "curl -o /dev/null --silent -Iw '%{http_code}' URL/artifact/PATH"]) >> '200'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/artifact/PATH']) >> 'CONTENT'
+ result == 'CONTENT'
+ }
+
+
+ def "[util.groovy] retrieveArtifact default file NOT exists"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.retrieveArtifact('PATH')
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "curl -o /dev/null --silent -Iw '%{http_code}' URL/artifact/PATH"]) >> '404'
+ 0 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/artifact/PATH']) >> 'CONTENT'
+ result == ''
+ }
+
+
+ def "[util.groovy] retrieveArtifact with build url and file exists"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.retrieveArtifact('PATH', 'BUILD_URL/')
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "curl -o /dev/null --silent -Iw '%{http_code}' BUILD_URL/artifact/PATH"]) >> '200'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/artifact/PATH']) >> 'CONTENT'
+ result == 'CONTENT'
+ }
+
+
+ def "[util.groovy] retrieveArtifact with build url and file NOT exists"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ when:
+ def result = groovyScript.retrieveArtifact('PATH', 'BUILD_URL/')
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "curl -o /dev/null --silent -Iw '%{http_code}' BUILD_URL/artifact/PATH"]) >> '404'
+ 0 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/artifact/PATH']) >> 'CONTENT'
+ result == ''
+ }
+
+ def "[util.groovy] retrieveJobInformation no arg"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ def jobMock = [
+ url: 'ANY_URL'
+ ]
+ when:
+ def result = groovyScript.retrieveJobInformation()
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> jobMock
+ result.url == 'ANY_URL'
+ }
+
+ def "[util.groovy] retrieveJobInformation with build url"() {
+ setup:
+ groovyScript.getBinding().setVariable('BUILD_URL', 'URL/')
+ def jobMock = [
+ url: 'ANY_URL'
+ ]
+ when:
+ def result = groovyScript.retrieveJobInformation('BUILD_URL/')
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/api/json?depth=0']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> jobMock
+ result.url == 'ANY_URL'
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job success with no job id and no build url"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'SUCCESS' ]
+ when:
+ def result = groovyScript.getMarkdownTestSummary()
+ then:
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> jobMock
+
+ // check result
+ result == '''
+Job #256 was: **SUCCESS**
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job success with no build url"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'SUCCESS' ]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID')
+ then:
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> jobMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **SUCCESS**
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job success with additional info"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'SUCCESS' ]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID', 'ADDITIONAL_INFO')
+ then:
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> jobMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **SUCCESS**
+
+ADDITIONAL_INFO
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job success with additonal info, build url"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'SUCCESS' ]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID', 'ADDITIONAL_INFO', 'BUILD_URL/')
+ then:
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/api/json?depth=0']) >> 'CONTENT'
+ 1 * getPipelineMock('readJSON')([text: 'CONTENT']) >> jobMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **SUCCESS**
+
+ADDITIONAL_INFO
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job failure"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'FAILURE' ]
+ def testResultsMock = [ passCount: 254, failCount: 0 ]
+ def failedTestsMock = [:]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID')
+ then:
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'JOB_INFO'
+ 1 * getPipelineMock('readJSON')([text: 'JOB_INFO']) >> jobMock
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveTestResults
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'TEST_RESULTS'
+ 1 * getPipelineMock('readJSON')([text: 'TEST_RESULTS']) >> testResultsMock
+ // retrieveFailedTests
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'FAILED_TESTS'
+ 1 * getPipelineMock('readJSON')([text: 'FAILED_TESTS']) >> failedTestsMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **FAILURE**
+Possible explanation: Pipeline failure or project build failure
+
+Please look here: URL/display/redirect
+
+**Test results:**
+- PASSED: 254
+- FAILED: 0
+
+Those are the test failures: none
+
+See console log:
+```spoiler Console Logs
+this is the console
+```
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job failure with additional info"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'FAILURE' ]
+ def testResultsMock = [ passCount: 254, failCount: 0 ]
+ def failedTestsMock = [:]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID', 'ADDITIONAL_INFO')
+ then:
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'JOB_INFO'
+ 1 * getPipelineMock('readJSON')([text: 'JOB_INFO']) >> jobMock
+ // retrieveTestResults
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'TEST_RESULTS'
+ 1 * getPipelineMock('readJSON')([text: 'TEST_RESULTS']) >> testResultsMock
+ // retrieveFailedTests
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'FAILED_TESTS'
+ 1 * getPipelineMock('readJSON')([text: 'FAILED_TESTS']) >> failedTestsMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **FAILURE**
+Possible explanation: Pipeline failure or project build failure
+
+ADDITIONAL_INFO
+
+Please look here: URL/display/redirect
+
+**Test results:**
+- PASSED: 254
+- FAILED: 0
+
+Those are the test failures: none
+
+See console log:
+```spoiler Console Logs
+this is the console
+```
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job failure with additional info and build url"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [
+ result: 'FAILURE',
+ artifacts: [
+ [
+ fileName: 'this_should_not_be_handled_console.log.me'
+ ],
+ ]
+ ]
+ def testResultsMock = [ passCount: 254, failCount: 0 ]
+ def failedTestsMock = [:]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID', 'ADDITIONAL_INFO', 'BUILD_URL/')
+ then:
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/api/json?depth=0']) >> 'JOB_INFO'
+ 1 * getPipelineMock('readJSON')([text: 'JOB_INFO']) >> jobMock
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveTestResults
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/testReport/api/json?depth=1']) >> 'TEST_RESULTS'
+ 1 * getPipelineMock('readJSON')([text: 'TEST_RESULTS']) >> testResultsMock
+ // retrieveFailedTests
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - BUILD_URL/testReport/api/json?depth=1']) >> 'FAILED_TESTS'
+ 1 * getPipelineMock('readJSON')([text: 'FAILED_TESTS']) >> failedTestsMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **FAILURE**
+Possible explanation: Pipeline failure or project build failure
+
+ADDITIONAL_INFO
+
+Please look here: BUILD_URL/display/redirect
+
+**Test results:**
+- PASSED: 254
+- FAILED: 0
+
+Those are the test failures: none
+
+See console log:
+```spoiler Console Logs
+this is the console
+```
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job failure with console artifact existing"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [
+ result: 'FAILURE',
+ artifacts: [
+ [
+ fileName: 'console.log'
+ ],
+ [
+ fileName: 'any_console.log'
+ ],
+ [
+ fileName: 'Another_console.log'
+ ],
+ [
+ fileName: 'this_should_not_be_handled_console.log.me'
+ ],
+ ]
+ ]
+ def testResultsMock = [ passCount: 254, failCount: 0 ]
+ def failedTestsMock = [:]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID')
+ then:
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'JOB_INFO'
+ 1 * getPipelineMock('readJSON')([text: 'JOB_INFO']) >> jobMock
+ // retrieveArtifact
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "curl -o /dev/null --silent -Iw '%{http_code}' URL/artifact/console.log"]) >> '200'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/artifact/console.log']) >> 'this is the console artifact'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "curl -o /dev/null --silent -Iw '%{http_code}' URL/artifact/any_console.log"]) >> '200'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/artifact/any_console.log']) >> 'this is the any_console artifact'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "curl -o /dev/null --silent -Iw '%{http_code}' URL/artifact/Another_console.log"]) >> '200'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/artifact/Another_console.log']) >> 'this is the Another_console artifact'
+ 0 * getPipelineMock('sh')([returnStdout: true, script: "curl -o /dev/null --silent -Iw '%{http_code}' URL/artifact/this_should_not_be_handled_console.log.me"]) >> '200'
+ 0 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/artifact/this_should_not_be_handled_console.log.me'])
+ // retrieveConsoleLog
+ 0 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveTestResults
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'TEST_RESULTS'
+ 1 * getPipelineMock('readJSON')([text: 'TEST_RESULTS']) >> testResultsMock
+ // retrieveFailedTests
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'FAILED_TESTS'
+ 1 * getPipelineMock('readJSON')([text: 'FAILED_TESTS']) >> failedTestsMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **FAILURE**
+Possible explanation: Pipeline failure or project build failure
+
+Please look here: URL/display/redirect
+
+**Test results:**
+- PASSED: 254
+- FAILED: 0
+
+Those are the test failures: none
+
+See console log:
+```spoiler Console Logs
+this is the console artifact
+```
+```spoiler any
+this is the any_console artifact
+```
+```spoiler Another
+this is the Another_console artifact
+```
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job failure with no test results"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'FAILURE' ]
+ def testResultsMock = [ passCount: 254, failCount: 635 ]
+ def failedTestsMock = [:]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID')
+ then:
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'JOB_INFO'
+ 1 * getPipelineMock('readJSON')([text: 'JOB_INFO']) >> jobMock
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveTestResults
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> { throw new Exception('unknown URL') }
+ 0 * getPipelineMock('readJSON')([text: 'TEST_RESULTS']) >> testResultsMock
+ // retrieveFailedTests
+ 0 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'FAILED_TESTS'
+ 0 * getPipelineMock('readJSON')([text: 'FAILED_TESTS']) >> failedTestsMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **FAILURE**
+Possible explanation: Pipeline failure or project build failure
+
+Please look here: URL/display/redirect
+See console log:
+```spoiler Console Logs
+this is the console
+```
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job failure with failed tests"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'FAILURE' ]
+ def testResultsMock = [ passCount: 254, failCount: 2 ]
+ def failedTestsMock = [
+ suites: [
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package1.class1',
+ name: 'test',
+ errorDetails: 'details package1.class1.test',
+ errorStackTrace: 'trace package1.class1.test'
+ ],
+ [
+ status: 'FAILED',
+ className: 'package1.class2',
+ name: 'test',
+ errorDetails: null,
+ errorStackTrace: 'trace package1.class2.test'
+ ]
+ ]
+ ]
+ ]
+ ]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID')
+ then:
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'JOB_INFO'
+ 1 * getPipelineMock('readJSON')([text: 'JOB_INFO']) >> jobMock
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveTestResults
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'TEST_RESULTS'
+ 1 * getPipelineMock('readJSON')([text: 'TEST_RESULTS']) >> testResultsMock
+ // retrieveFailedTests
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'FAILED_TESTS'
+ 1 * getPipelineMock('readJSON')([text: 'FAILED_TESTS']) >> failedTestsMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **FAILURE**
+Possible explanation: Pipeline failure or project build failure
+
+Please look here: URL/display/redirect
+
+**Test results:**
+- PASSED: 254
+- FAILED: 2
+
+Those are the test failures:
+```spoiler [package1.class1.test](URL/testReport/package1/class1/test/)
+details package1.class1.test
+```
+```spoiler [package1.class2.test](URL/testReport/package1/class2/test/)
+trace package1.class2.test
+```
+
+See console log:
+```spoiler Console Logs
+this is the console
+```
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job unstable with failed tests"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'UNSTABLE' ]
+ def testResultsMock = [ passCount: 254, failCount: 2 ]
+ def failedTestsMock = [
+ suites: [
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package1.class1',
+ name: 'test',
+ errorDetails: 'details package1.class1.test'
+ ],
+ [
+ status: 'FAILED',
+ className: 'package1.class2',
+ name: 'test',
+ errorDetails: 'details package1.class2.test'
+ ]
+ ]
+ ]
+ ]
+ ]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID')
+ then:
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'JOB_INFO'
+ 1 * getPipelineMock('readJSON')([text: 'JOB_INFO']) >> jobMock
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console'
+ // retrieveTestResults
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'TEST_RESULTS'
+ 1 * getPipelineMock('readJSON')([text: 'TEST_RESULTS']) >> testResultsMock
+ // retrieveFailedTests
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'FAILED_TESTS'
+ 1 * getPipelineMock('readJSON')([text: 'FAILED_TESTS']) >> failedTestsMock
+
+ // check result
+ result == '''
+**JOB_ID job** #256 was: **UNSTABLE**
+Possible explanation: This should be test failures
+
+Please look here: URL/display/redirect
+
+**Test results:**
+- PASSED: 254
+- FAILED: 2
+
+Those are the test failures:
+```spoiler [package1.class1.test](URL/testReport/package1/class1/test/)
+details package1.class1.test
+```
+```spoiler [package1.class2.test](URL/testReport/package1/class2/test/)
+details package1.class2.test
+```
+'''
+ }
+
+ def "[util.groovy] getMarkdownTestSummary job unstable with failed tests and GITHUB output"() {
+ setup:
+ groovyScript.getBinding().setVariable("BUILD_URL", 'URL/')
+ groovyScript.getBinding().setVariable("BUILD_NUMBER", '256')
+ def jobMock = [ result: 'FAILURE' ]
+ def testResultsMock = [ passCount: 254, failCount: 2 ]
+ def failedTestsMock = [
+ suites: [
+ [
+ cases: [
+ [
+ status: 'FAILED',
+ className: 'package1.class1',
+ name: 'test',
+ errorDetails: 'details package1.class1.test'
+ ],
+ [
+ status: 'FAILED',
+ className: 'package1.class2',
+ name: 'test',
+ errorStackTrace: 'stacktrace package1.class2.test\nstacktrace line 2\nstacktrace line 3'
+ ]
+ ]
+ ]
+ ]
+ ]
+ when:
+ def result = groovyScript.getMarkdownTestSummary('JOB_ID', '', "URL/", 'GITHUB')
+ then:
+ // retrieveJobInformation
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/api/json?depth=0']) >> 'JOB_INFO'
+ 1 * getPipelineMock('readJSON')([text: 'JOB_INFO']) >> jobMock
+ // retrieveConsoleLog
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/consoleText | tail -n 50']) >> 'this is the console\nanother line'
+ // retrieveTestResults
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'TEST_RESULTS'
+ 1 * getPipelineMock('readJSON')([text: 'TEST_RESULTS']) >> testResultsMock
+ // retrieveFailedTests
+ 1 * getPipelineMock('sh')([returnStdout: true, script: 'wget --no-check-certificate -qO - URL/testReport/api/json?depth=1']) >> 'FAILED_TESTS'
+ 1 * getPipelineMock('readJSON')([text: 'FAILED_TESTS']) >> failedTestsMock
+
+ // check result
+ result == '''
+**JOB_ID job** `#256` was: **FAILURE**
+Possible explanation: Pipeline failure or project build failure
+
+Please look here: URL/display/redirect
+
+**Test results:**
+- PASSED: 254
+- FAILED: 2
+
+Those are the test failures:
+
+package1.class1.test
+details package1.class1.test
+
+
+package1.class2.test
+stacktrace package1.class2.test
+stacktrace line 2
+stacktrace line 3
+
+
+See console log:
+
+Console Logs
+this is the console
+another line
+
+'''
+ }
+
+ def "[util.groovy] multiple serializeQueryParams serialize map to query url"() {
+ setup:
+ def params = [q: 'value', k: 3]
+ when:
+ def result = groovyScript.serializeQueryParams(params)
+ then:
+ result == 'q=value&k=3'
+ }
+ def "[util.groovy] single serializeQueryParams serialize map to query url"() {
+ setup:
+ def params = [q: 'value']
+ when:
+ def result = groovyScript.serializeQueryParams(params)
+ then:
+ result == 'q=value'
+ }
+
+ def "[util.groovy] withKerberos using default succeeded"() {
+ setup:
+ def env = [:]
+ groovyScript.getBinding().setVariable("env", env)
+ // simulate withCredentials binding
+ groovyScript.getBinding().setVariable('KEYTAB_FILE', 'path/to/file')
+ when:
+ groovyScript.withKerberos('keytab-id') {
+ sh 'hello'
+ }
+ then:
+ 1 * getPipelineMock('file.call')([credentialsId: 'keytab-id', variable: 'KEYTAB_FILE']) >> 'path/to/file'
+ 1 * getPipelineMock('withCredentials')(['path/to/file'], _ as Closure)
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist -kt path/to/file |grep REDHAT.COM | awk -F' ' 'NR==1{print \$4}' "]) >> 'service-account'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist | grep -i 'Default principal' | awk -F':' 'NR==1{print \$2}' "]) >> ''
+ 1 * getPipelineMock("sh")('hello')
+ 1 * getPipelineMock('sh')([returnStatus: true, script: "kinit service-account -kt path/to/file"]) >> 0
+ env['KERBEROS_PRINCIPAL'] == 'service-account'
+ noExceptionThrown()
+ }
+
+ def "[util.groovy] withKerberos using default succeeded after 2 failure"() {
+ setup:
+ def env = [:]
+ groovyScript.getBinding().setVariable("env", env)
+ // simulate withCredentials binding
+ groovyScript.getBinding().setVariable('KEYTAB_FILE', 'path/to/file')
+ when:
+ groovyScript.withKerberos('keytab-id') {
+ sh 'hello'
+ }
+ then:
+ 1 * getPipelineMock('file.call')([credentialsId: 'keytab-id', variable: 'KEYTAB_FILE']) >> 'path/to/file'
+ 1 * getPipelineMock('withCredentials')(['path/to/file'], _ as Closure)
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist -kt path/to/file |grep REDHAT.COM | awk -F' ' 'NR==1{print \$4}' "]) >> 'service-account'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist | grep -i 'Default principal' | awk -F':' 'NR==1{print \$2}' "]) >> ''
+ 1 * getPipelineMock("sh")('hello')
+ 2 * getPipelineMock('sh')([returnStatus: true, script: "kinit service-account -kt path/to/file"]) >> 1
+ 1 * getPipelineMock('sh')([returnStatus: true, script: "kinit service-account -kt path/to/file"]) >> 0
+ env['KERBEROS_PRINCIPAL'] == 'service-account'
+ noExceptionThrown()
+ }
+
+
+ def "[util.groovy] withKerberos using custom domain succeeded"() {
+ setup:
+ def env = [:]
+ groovyScript.getBinding().setVariable("env", env)
+ // simulate withCredentials binding
+ groovyScript.getBinding().setVariable('KEYTAB_FILE', 'path/to/file')
+ when:
+ groovyScript.withKerberos('keytab-id', {sh 'hello'}, 'CUSTOM.COM')
+ then:
+ 1 * getPipelineMock('file.call')([credentialsId: 'keytab-id', variable: 'KEYTAB_FILE']) >> 'path/to/file'
+ 1 * getPipelineMock('withCredentials')(['path/to/file'], _ as Closure)
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist -kt path/to/file |grep CUSTOM.COM | awk -F' ' 'NR==1{print \$4}' "]) >> 'service-account'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist | grep -i 'Default principal' | awk -F':' 'NR==1{print \$2}' "]) >> ''
+ 1 * getPipelineMock("sh")('hello')
+ 1 * getPipelineMock('sh')([returnStatus: true, script: "kinit service-account -kt path/to/file"]) >> 0
+ env['KERBEROS_PRINCIPAL'] == 'service-account'
+ noExceptionThrown()
+ }
+
+ def "[util.groovy] withKerberos when blank kerberos principal"() {
+ setup:
+ def env = [:]
+ groovyScript.getBinding().setVariable("env", env)
+ // simulate withCredentials binding
+ groovyScript.getBinding().setVariable('KEYTAB_FILE', 'path/to/file')
+ when:
+ groovyScript.withKerberos('keytab-id') {
+ sh 'hello'
+ }
+ then:
+ 1 * getPipelineMock('file.call')([credentialsId: 'keytab-id', variable: 'KEYTAB_FILE']) >> 'path/to/file'
+ 1 * getPipelineMock('withCredentials')(['path/to/file'], _ as Closure)
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist -kt path/to/file |grep REDHAT.COM | awk -F' ' 'NR==1{print \$4}' "]) >> ''
+ 0 * getPipelineMock('sh')([returnStdout: true, script: "klist | grep -i 'Default principal' | awk -F':' 'NR==1{print \$2}' "])
+ // closure not being executed
+ 0 * getPipelineMock("sh")('hello')
+ 0 * getPipelineMock('sh')([returnStatus: true, script: "kinit -kt path/to/file"]) >> 0
+ thrown(Exception)
+ }
+
+ def "[util.groovy] withKerberos when kinit fails"() {
+ setup:
+ def env = [:]
+ groovyScript.getBinding().setVariable("env", env)
+ // simulate withCredentials binding
+ groovyScript.getBinding().setVariable('KEYTAB_FILE', 'path/to/file')
+ when:
+ groovyScript.withKerberos('keytab-id') {
+ sh 'hello'
+ }
+ then:
+ 1 * getPipelineMock('file.call')([credentialsId: 'keytab-id', variable: 'KEYTAB_FILE']) >> 'path/to/file'
+ 1 * getPipelineMock('withCredentials')(['path/to/file'], _ as Closure)
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist -kt path/to/file |grep REDHAT.COM | awk -F' ' 'NR==1{print \$4}' "]) >> 'service-account'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist | grep -i 'Default principal' | awk -F':' 'NR==1{print \$2}' "]) >> ''
+ // closure not being executed
+ 0 * getPipelineMock("sh")('hello')
+ 5 * getPipelineMock('sh')([returnStatus: true, script: "kinit service-account -kt path/to/file"]) >> 1
+ env['KERBEROS_PRINCIPAL'] == 'service-account'
+ thrown(Exception)
+ }
+
+ def "[util.groovy] withKerberos when kinit fails after one try"() {
+ setup:
+ def env = [:]
+ groovyScript.getBinding().setVariable("env", env)
+ // simulate withCredentials binding
+ groovyScript.getBinding().setVariable('KEYTAB_FILE', 'path/to/file')
+ when:
+ groovyScript.withKerberos('keytab-id', { sh 'hello' }, 'REDHAT.COM', 1)
+ then:
+ 1 * getPipelineMock('file.call')([credentialsId: 'keytab-id', variable: 'KEYTAB_FILE']) >> 'path/to/file'
+ 1 * getPipelineMock('withCredentials')(['path/to/file'], _ as Closure)
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist -kt path/to/file |grep REDHAT.COM | awk -F' ' 'NR==1{print \$4}' "]) >> 'service-account'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist | grep -i 'Default principal' | awk -F':' 'NR==1{print \$2}' "]) >> ''
+ // closure not being executed
+ 0 * getPipelineMock("sh")('hello')
+ 1 * getPipelineMock('sh')([returnStatus: true, script: "kinit service-account -kt path/to/file"]) >> 1
+ env['KERBEROS_PRINCIPAL'] == 'service-account'
+ thrown(Exception)
+ }
+
+ def "[util.groovy] withKerberos when principal already authenticated"() {
+ setup:
+ def env = [:]
+ groovyScript.getBinding().setVariable("env", env)
+ // simulate withCredentials binding
+ groovyScript.getBinding().setVariable('KEYTAB_FILE', 'path/to/file')
+ when:
+ groovyScript.withKerberos('keytab-id') {
+ sh 'hello'
+ }
+ then:
+ 1 * getPipelineMock('file.call')([credentialsId: 'keytab-id', variable: 'KEYTAB_FILE']) >> 'path/to/file'
+ 1 * getPipelineMock('withCredentials')(['path/to/file'], _ as Closure)
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist -kt path/to/file |grep REDHAT.COM | awk -F' ' 'NR==1{print \$4}' "]) >> 'service-account'
+ 1 * getPipelineMock('sh')([returnStdout: true, script: "klist | grep -i 'Default principal' | awk -F':' 'NR==1{print \$2}' "]) >> ' service-account'
+ 1 * getPipelineMock("sh")('hello')
+ // kinit not being executed
+ 0 * getPipelineMock('sh')([returnStatus: true, script: "kinit service-account -kt path/to/file"])
+ env['KERBEROS_PRINCIPAL'] == 'service-account'
+ noExceptionThrown()
+ }
+
+ def "[util.groovy] runWithPythonVirtualEnv all params"() {
+ when:
+ def result = groovyScript.runWithPythonVirtualEnv('CMD', 'anyenv', true)
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: true, script: """
+source ~/virtenvs/anyenv/bin/activate
+CMD
+"""]) >> 'output'
+ result == 'output'
+ }
+
+ def "[util.groovy] runWithPythonVirtualEnv default"() {
+ when:
+ def result = groovyScript.runWithPythonVirtualEnv('CMD', 'anyenv')
+ then:
+ 1 * getPipelineMock('sh')([returnStdout: false, script: """
+source ~/virtenvs/anyenv/bin/activate
+CMD
+"""])
+ }
+
+ def "[util.groovy] displayDurationFromSeconds with seconds"() {
+ when:
+ def result = groovyScript.displayDurationFromSeconds(5)
+ then:
+ result == "5s"
+ }
+
+ def "[util.groovy] displayDurationFromSeconds with minutes"() {
+ when:
+ def result = groovyScript.displayDurationFromSeconds(300)
+ then:
+ result == "5m0s"
+ }
+
+ def "[util.groovy] displayDurationFromSeconds with minutes and seconds"() {
+ when:
+ def result = groovyScript.displayDurationFromSeconds(301)
+ then:
+ result == "5m1s"
+ }
+
+ def "[util.groovy] displayDurationFromSeconds with hours"() {
+ when:
+ def result = groovyScript.displayDurationFromSeconds(3600)
+ then:
+ result == "1h0m0s"
+ }
+
+ def "[util.groovy] displayDurationFromSeconds with hours, minutes and seconds"() {
+ when:
+ def result = groovyScript.displayDurationFromSeconds(3723)
+ then:
+ result == "1h2m3s"
+ }
+}
diff --git a/jenkins-pipeline-shared-libraries/vars/buildChain.groovy b/jenkins-pipeline-shared-libraries/vars/buildChain.groovy
new file mode 100644
index 000000000..62ead57ea
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/vars/buildChain.groovy
@@ -0,0 +1,15 @@
+/*
+* It gets buildChain version from composite action.yml file
+*/
+def getBuildChainVersionFromCompositeActionFile(String actionFilePath = '.ci/actions/build-chain/action.yml', String usesContainingString = 'github-action-build-chain@') {
+ def actionObject = readYaml(file: actionFilePath)
+
+ def uses = actionObject.runs.steps.uses
+ def action = uses != null ? uses.find({ it.contains(usesContainingString) }) : null
+ if (action == null) {
+ throw new RuntimeException("There's not steps with 'uses' for build-chain ${usesContainingString}")
+ }
+
+ def buildChainScmRevision = action.substring(action.indexOf('@') + 1)
+ return "^${buildChainScmRevision}"
+}
diff --git a/jenkins-pipeline-shared-libraries/vars/cloud.groovy b/jenkins-pipeline-shared-libraries/vars/cloud.groovy
new file mode 100644
index 000000000..e6dc8c331
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/vars/cloud.groovy
@@ -0,0 +1,331 @@
+// Quay.io:BEGIN //
+
+/*
+* Make a quay.io image public if not already
+*
+* namespace/repository: identify the quay.io image
+* credentials: map with 'token' and/or 'usernamePassword' credential ids,
+* consumed by util.executeWithCredentialsMap in the called helpers
+* Fails the build (error step) when the visibility cannot be changed.
+*/
+void makeQuayImagePublic(String namespace, String repository, Map credentials = [ 'token': '', 'usernamePassword': '' ]) {
+ if (!isQuayImagePublic(namespace, repository, credentials)
+ && !setQuayImagePublic(namespace, repository, credentials)) {
+ error "Cannot set image quay.io/${namespace}/${repository} as visible"
+ }
+}
+
+/*
+* Checks whether a quay image is public
+*
+* Returns true only when the quay.io API reports `.is_public` as true.
+* NOTE(review): QUAY_TOKEN is presumably bound by util.executeWithCredentialsMap — confirm.
+*/
+boolean isQuayImagePublic(String namespace, String repository, Map credentials = [ 'token': '', 'usernamePassword': '' ]) {
+ def output = 'false'
+ util.executeWithCredentialsMap(credentials) {
+ output = sh(returnStdout: true, script: "curl -H 'Authorization: Bearer ${QUAY_TOKEN}' -X GET https://quay.io/api/v1/repository/${namespace}/${repository} | jq '.is_public'").trim()
+ }
+ return output == 'true'
+}
+
+/*
+* Sets a Quay repository as public
+*
+* return false if any problem occurs (the API's `.success` field is not 'true')
+* NOTE(review): QUAY_TOKEN is presumably bound by util.executeWithCredentialsMap — confirm.
+*/
+boolean setQuayImagePublic(String namespace, String repository, Map credentials = [ 'token': '', 'usernamePassword': '' ]) {
+ def output = 'false'
+ util.executeWithCredentialsMap(credentials) {
+ output = sh(returnStdout: true, script: "curl -H 'Content-Type: application/json' -H 'Authorization: Bearer ${QUAY_TOKEN}' -X POST --data '{\"visibility\": \"public\"}' https://quay.io/api/v1/repository/${namespace}/${repository}/changevisibility | jq '.success'").trim()
+ }
+ return output == 'true'
+}
+
+/*
+* Update image description on quay.io
+* descriptionString = string content that will be the description of the image
+*
+* Writes the payload to description.json (archived as a build artifact for
+* traceability) and PUTs it to the quay.io repository API.
+*/
+void updateQuayImageDescription(String descriptionString, String namespace, String repository, Map credentials = [ 'token': '', 'usernamePassword': '' ]) {
+ util.executeWithCredentialsMap(credentials) {
+ def json = [
+ description: descriptionString
+ ]
+ writeJSON(file: "description.json", json: json)
+ archiveArtifacts(artifacts: 'description.json')
+ sh(script: "curl -H 'Content-type: application/json' -H 'Authorization: Bearer ${QUAY_TOKEN}' -X PUT --data-binary '@description.json' https://quay.io/api/v1/repository/${namespace}/${repository}")
+ }
+}
+
+// Quay.io:END //
+
+/*
+* Login to given OpenShift API
+*
+* openShiftCredsId: Jenkins usernamePassword credential id used for `oc login`.
+* TLS verification is skipped (--insecure-skip-tls-verify).
+*/
+void loginOpenShift(String openShiftAPI, String openShiftCredsId) {
+ withCredentials([usernamePassword(credentialsId: openShiftCredsId, usernameVariable: 'OC_USER', passwordVariable: 'OC_PWD')]) {
+ sh "oc login --username=${OC_USER} --password=${OC_PWD} --server=${openShiftAPI} --insecure-skip-tls-verify"
+ }
+}
+
+/*
+* Login to current OpenShift registry
+*
+* It considers that you are already authenticated to OpenShift.
+* `set +x` avoids echoing the `oc whoami -t` token in the build log.
+*/
+void loginOpenShiftRegistry(String containerEngine = 'docker', String containerEngineTlsOptions = '') {
+ // username can be anything. See https://docs.openshift.com/container-platform/4.4/registry/accessing-the-registry.html#registry-accessing-directly_accessing-the-registry
+ sh "set +x && ${containerEngine} login -u anything -p \$(oc whoami -t) ${containerEngineTlsOptions} ${getOpenShiftRegistryURL()}"
+}
+
+/*
+* Retrieve the OpenShift registry URL
+*
+* It considers that you are already authenticated to OpenShift.
+* Reads the host column of the last route in namespace openshift-image-registry.
+*/
+String getOpenShiftRegistryURL() {
+ return sh(returnStdout: true, script: "oc get routes -n openshift-image-registry | tail -1 | awk '{print \$2}'")?.trim()
+}
+
+/*
+* Login to a container registry
+*
+* credsId: Jenkins usernamePassword credential id.
+* `set +x` avoids echoing the password in the build log.
+*/
+void loginContainerRegistry(String registry, String credsId, String containerEngine = 'docker', String containerEngineTlsOptions = '') {
+ withCredentials([usernamePassword(credentialsId: credsId, usernameVariable: 'REGISTRY_USER', passwordVariable: 'REGISTRY_PWD')]) {
+ sh "set +x && ${containerEngine} login -u ${REGISTRY_USER} -p ${REGISTRY_PWD} ${containerEngineTlsOptions} ${registry}"
+ }
+}
+
+// Pull an image, retrying up to `retries` times (Jenkins `retry` step).
+void pullImage(String imageTag, int retries = 3, String containerEngine = 'docker', String containerEngineTlsOptions = '') {
+ retry(retries) {
+ sh "${containerEngine} pull ${containerEngineTlsOptions} ${imageTag}"
+ }
+}
+
+// Push an image, retrying up to `retries` times (Jenkins `retry` step).
+void pushImage(String imageTag, int retries = 3, String containerEngine = 'docker', String containerEngineTlsOptions = '') {
+ retry(retries) {
+ sh "${containerEngine} push ${containerEngineTlsOptions} ${imageTag}"
+ }
+}
+
+// Tag a local image with an additional name.
+void tagImage(String oldImageTag, String newImageTag, String containerEngine = 'docker') {
+ sh "${containerEngine} tag ${oldImageTag} ${newImageTag}"
+}
+
+/*
+* Get reduced tag, aka X.Y, from the given tag
+*
+* Throws (after logging) when the tag has fewer than two dot-separated parts,
+* e.g. "1" — the index access on versionSplit[1] fails.
+*/
+String getReducedTag(String originalTag) {
+ try {
+ String[] versionSplit = originalTag.split("\\.")
+ return "${versionSplit[0]}.${versionSplit[1]}"
+ } catch (err) {
+ println "[ERROR] ${originalTag} cannot be reduced to the format X.Y"
+ throw err
+ }
+}
+
+/*
+* Cleanup all containers and images
+*
+* Removal failures are tolerated (`|| date` keeps the sh step successful).
+*/
+void cleanContainersAndImages(String containerEngine = 'podman') {
+ println '[INFO] Cleaning up running containers and images. Any error here can be ignored'
+ String allContainerIds = sh(script: "${containerEngine} ps -a -q | tr '\\n' ','", returnStdout: true).trim()
+ allContainerIds.split(',').findAll { it != '' }.each { containerId ->
+ sh "${containerEngine} rm -f ${containerId} || date"
+ }
+ String allImageIds = sh(script: "${containerEngine} images -q | tr '\\n' ','", returnStdout: true).trim()
+ allImageIds.split(',').findAll { it != '' }.each { imageId ->
+ sh "${containerEngine} rmi -f ${imageId} || date"
+ }
+}
+
+/*
+* Start local docker registry
+*
+* Accessible on `localhost:${port}`. Default port is 5000.
+* Removes any previous registry container for the same port first.
+* Returns the registry address ("localhost:<port>").
+*/
+String startLocalRegistry(int port = 5000) {
+ cleanLocalRegistry(port)
+ sh "docker run -d -p ${port}:5000 --restart=always --name registry-${port} registry:2"
+ sh 'docker ps'
+ return "localhost:${port}"
+}
+
+/*
+* Find an open local port.
+*
+* Binds an ephemeral socket (port 0) via python and returns the kernel-assigned port.
+* NOTE(review): requires a `python` executable on the node — confirm agents provide it.
+*/
+int findFreePort() {
+ return Integer.valueOf(sh( script: """ echo \$(python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()') """, returnStdout: true ).trim())
+}
+
+/*
+* Clean local registry
+*
+* Best-effort removal of the registry-<port> container (`|| true`).
+*/
+void cleanLocalRegistry(int port = 5000) {
+ sh "docker rm -f registry-${port} || true"
+ sh 'docker ps'
+}
+
+/*
+* Squash a docker image
+*
+* If `replaceCurrentImage` is disabled, the `-squashed` suffix is added to the returned image name.
+* Counts buildkit layers via `docker history`, squashes them with the external
+* `docker-squash` tool, then pushes the resulting image.
+*/
+String dockerSquashImage(String baseImage, String squashMessage = "${baseImage} squashed", boolean replaceCurrentImage = true) {
+ String squashedPlatformImage = replaceCurrentImage ? "${baseImage}" : "${baseImage}-squashed"
+
+ // Squash images
+ def nbLayers = Integer.parseInt(sh(returnStdout: true, script: "docker history ${baseImage} | grep buildkit.dockerfile | wc -l").trim())
+ nbLayers++ // Get the next layer not done by buildkit
+ echo "Got ${nbLayers} layers to squash"
+ // Use message option in docker-squash due to https://github.com/goldmann/docker-squash/issues/220
+ def dockerSquashShellCmd = "docker-squash -v -m '${squashMessage}' -f ${nbLayers} -t ${squashedPlatformImage} ${baseImage}"
+ sh dockerSquashShellCmd
+ sh "docker push ${squashedPlatformImage}"
+
+ return squashedPlatformImage
+}
+
+/*
+* Print some debugging for a specific image
+* (local images list, history and inspect output)
+*/
+void dockerDebugImage(String imageTag) {
+ sh 'docker images'
+ sh "docker history ${imageTag}"
+ sh "docker inspect ${imageTag}"
+}
+
+
+// Multiplatform build:BEGIN //
+
+/*
+* Build an image for multiple platforms and create a manifest to gather under a same name
+*
+* You should have run `prepareForDockerMultiplatformBuild` method before executing this method.
+* For each platform, builds (and optionally squashes) a `<tag>-<os>-<arch>` image,
+* then groups them under `buildImageTag` via a manifest.
+*/
+void dockerBuildMultiPlatformImages(String buildImageTag, List platforms, boolean squashImages = true, String squashMessage = "Squashed ${buildImageTag}", boolean debug = false, boolean outputToFile = false) {
+ // Build image locally in tgz file
+ List buildPlatformImages = platforms.collect { platform ->
+ String os_arch = platform.replaceAll('/', '-')
+ String platformImage = "${buildImageTag}-${os_arch}"
+ String finalPlatformImage = platformImage
+
+ // Build
+ dockerBuildPlatformImage(platformImage, platform, outputToFile)
+ if (debug) { dockerDebugImage(platformImage) }
+
+ if (squashImages) {
+ finalPlatformImage = dockerSquashImage(platformImage, squashMessage)
+ if (debug) { dockerDebugImage(platformImage) }
+ }
+
+ // value returned from the collect closure for this platform
+ return finalPlatformImage
+ }
+
+ dockerCreateManifest(buildImageTag, buildPlatformImages)
+ if (debug) { dockerDebugImage(buildImageTag) }
+}
+
+/*
+* Build an image for a specific platform
+*
+* You should have run `prepareForDockerMultiplatformBuild` method before executing this method.
+* When outputToFile is true, build stderr is redirected to a per-platform log
+* file in the workspace (slashes/colons replaced to form a valid filename).
+*/
+void dockerBuildPlatformImage(String buildImageTag, String platform, boolean outputToFile = false) {
+ def logFileName = (buildImageTag + '-' + platform + '-build.log')
+ .replaceAll('/','_')
+ .replaceAll(':','_')
+ sh "docker buildx build --push --sbom=false --provenance=false --platform ${platform} -t ${buildImageTag} .${outputToFile ? ' 2> ' + "${WORKSPACE}/${logFileName}" : ''}"
+ sh "docker buildx imagetools inspect ${buildImageTag}"
+ sh "docker pull --platform ${platform} ${buildImageTag}"
+}
+
+/*
+* Create a multiplatform manifest based on the given images
+*
+* Any pre-existing local manifest with the same name is removed first.
+*/
+void dockerCreateManifest(String buildImageTag, List manifestImages) {
+ sh "docker manifest rm ${buildImageTag} || true"
+ sh "docker manifest create ${buildImageTag} --insecure ${manifestImages.join(' ')}"
+ sh "docker manifest push ${buildImageTag}"
+}
+
+/*
+* Prepare the node for Docker multiplatform build
+*
+* Each element of the `mirrorRegistriesConfig` should contain:
+* - name: Name of the registry to mirror
+* - mirrors: List of mirrors for that registry, containing:
+* - url: mirror url
+* - insecure: whether the mirror is insecure
+*
+* Installs binfmt emulators, writes a buildkitd.toml with the insecure/mirror
+* registry config, and creates+selects a `mybuilder` docker-container builder.
+*/
+void prepareForDockerMultiplatformBuild(List insecureRegistries = [], List mirrorRegistriesConfig = [], boolean debug = false) {
+ cleanDockerMultiplatformBuild(debug)
+
+ // For multiplatform build
+ sh 'docker run --rm --privileged --name binfmt docker.io/tonistiigi/binfmt --install all'
+
+ if (debug) { debugDockerMultiplatformBuild() }
+
+ String buildkitdtomlConfig = "debug = ${debug}\n"
+
+ insecureRegistries.each {
+ buildkitdtomlConfig += "${getBuildkitRegistryConfigStr(it, true)}"
+ }
+
+ mirrorRegistriesConfig.each { mirrorRegistryCfg ->
+ buildkitdtomlConfig += "[registry.\"${ mirrorRegistryCfg.name }\"]\n"
+ buildkitdtomlConfig += "mirrors = [${ mirrorRegistryCfg.mirrors.collect { "\"${it.url }\"" }.join(',')}]\n"
+ mirrorRegistryCfg.mirrors.each { mirror ->
+ buildkitdtomlConfig += "${getBuildkitRegistryConfigStr(mirror.url, mirror.insecure)}"
+ }
+ }
+
+ writeFile(file: 'buildkitd.toml', text: buildkitdtomlConfig)
+ if (debug) {
+ sh 'cat buildkitd.toml'
+ }
+
+ sh 'docker buildx create --name mybuilder --driver docker-container --driver-opt network=host --bootstrap --config ${WORKSPACE}/buildkitd.toml'
+ sh 'docker buildx use mybuilder'
+
+ if (debug) { debugDockerMultiplatformBuild() }
+}
+
+// Render one buildkitd.toml `[registry."<url>"]` section; `http = true` marks it insecure.
+String getBuildkitRegistryConfigStr(String registryURL, boolean insecure) {
+ return """[registry."${registryURL}"]
+http = ${insecure}
+"""
+}
+
+/*
+* Return the mirror registry config for `docker.io`
+*
+* This checks for internal registry defined as env `DOCKER_REGISTRY_MIRROR`.
+* Fallback to `mirror.gcr.io` if none defined.
+* An internal mirror is treated as insecure; the public fallback is not.
+*/
+Map getDockerIOMirrorRegistryConfig() {
+ return [
+ name: 'docker.io',
+ mirrors: [
+ [
+ url : env.DOCKER_REGISTRY_MIRROR ?: 'mirror.gcr.io',
+ insecure: env.DOCKER_REGISTRY_MIRROR ? true : false,
+ ]
+ ],
+ ]
+}
+
+/*
+* Helpful commands to debug `docker buildx` preparation
+*/
+void debugDockerMultiplatformBuild() {
+ sh 'docker context ls'
+ sh 'docker buildx inspect'
+ sh 'docker buildx ls'
+}
+
+/*
+* Clean the node from Docker multiplatform configuration
+* (best-effort removal of the `mybuilder` builder and `binfmt` container)
+*/
+void cleanDockerMultiplatformBuild(boolean debug = false) {
+ sh 'docker buildx rm mybuilder || true'
+ sh 'docker rm -f binfmt || true'
+ if (debug) { debugDockerMultiplatformBuild() }
+}
+
+// Multiplatform build:END //
+
+// Copy an image (all platforms, TLS verification disabled) between registries with skopeo.
+void skopeoCopyRegistryImages(String oldImageName, String newImageName, int retries = 3) {
+ sh "skopeo copy --retry-times ${retries} --tls-verify=false --all docker://${oldImageName} docker://${newImageName}"
+}
\ No newline at end of file
diff --git a/jenkins-pipeline-shared-libraries/vars/githubscm.groovy b/jenkins-pipeline-shared-libraries/vars/githubscm.groovy
new file mode 100644
index 000000000..e2874a78d
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/vars/githubscm.groovy
@@ -0,0 +1,670 @@
+// Build a GitSCM checkout configuration map for https://github.com/<author>/<repository>.
+// NOTE(review): `ignoreErrors` is accepted but never used in this body — confirm intent.
+def resolveRepository(String repository, String author, String branches, boolean ignoreErrors, String credentialID = 'kie-ci') {
+ println "[INFO] Resolving Repository https://github.com/${author}/${repository}:${branches}. CredentialsID: ${credentialID}"
+ return [$class : 'GitSCM',
+ branches : [[name: branches]],
+ doGenerateSubmoduleConfigurations: false,
+ extensions : [[$class: 'CleanBeforeCheckout'],
+ [$class : 'SubmoduleOption',
+ disableSubmodules : false,
+ parentCredentials : true,
+ recursiveSubmodules: true,
+ reference : '',
+ trackingSubmodules : false],
+ [$class : 'RelativeTargetDirectory',
+ relativeTargetDir: './']],
+ submoduleCfg : [],
+ userRemoteConfigs : [[credentialsId: credentialID, url: "https://github.com/${author}/${repository}.git"]]
+ ]
+}
+
+// Checkout `branches` from the author's fork if it exists, else from the target
+// project; fall back to defaultAuthor/defaultBranches otherwise.
+// mergeTarget=true merges the source branch into the target before continuing
+// (only when an open PR exists for it).
+// credentials: map with 'token' (GitHub API) and 'usernamePassword' (git) ids.
+def checkoutIfExists(String repository, String author, String branches, String defaultAuthor, String defaultBranches, boolean mergeTarget = false, def credentials = ['token': 'kie-ci1-token', 'usernamePassword': 'kie-ci']) {
+ assert credentials['token']
+ assert credentials['usernamePassword']
+ def sourceAuthor = author
+ // fork may have a different name than the upstream repository
+ def sourceRepository = getForkedProjectName(defaultAuthor, repository, sourceAuthor, credentials['token']) ?: repository
+ // Checks source group and branch (for cases where the branch has been created in the author's forked project)
+ def repositoryScm = getRepositoryScm(sourceRepository, author, branches, credentials['usernamePassword'])
+ if (repositoryScm == null) {
+ // Checks target group and and source branch (for cases where the branch has been created in the target project itself
+ repositoryScm = getRepositoryScm(repository, defaultAuthor, branches, credentials['usernamePassword'])
+ sourceAuthor = repositoryScm ? defaultAuthor : author
+ }
+ if (repositoryScm != null && (!mergeTarget || hasPullRequest(defaultAuthor, repository, author, branches, credentials['token']))) {
+ if (mergeTarget) {
+ mergeSourceIntoTarget(sourceRepository, sourceAuthor, branches, repository, defaultAuthor, defaultBranches, credentials['usernamePassword'])
+ } else {
+ checkout repositoryScm
+ }
+ } else {
+ checkout(resolveRepository(repository, defaultAuthor, defaultBranches, false, credentials['usernamePassword']))
+ }
+}
+
+// Probe whether <author>/<repository>:<branches> is checkout-able.
+// Performs a throwaway checkout in a temp dir (always deleted); returns the
+// GitSCM map on success, null when the branch/repo is not reachable.
+def getRepositoryScm(String repository, String author, String branches, String credentialId = 'kie-ci') {
+ def repositoryScm = resolveRepository(repository, author, branches, true, credentialId)
+ dir("githubscm-get-repository-${repository}") {
+ try {
+ checkout repositoryScm
+ } catch (Exception ex) {
+ println "[WARNING] Branches [${branches}] from repository ${repository} not found in ${author} organisation."
+ repositoryScm = null
+ } finally {
+ deleteDir()
+ }
+ }
+ return repositoryScm
+}
+
+// Checkout the target branch and `git pull` the source branch into it.
+// Throws (after printing a rebase hint) when the merge fails.
+def mergeSourceIntoTarget(String sourceRepository, String sourceAuthor, String sourceBranches, String targetRepository, String targetAuthor, String targetBranches, String credentialId = 'kie-ci') {
+ println "[INFO] Merging source [${sourceAuthor}/${sourceRepository}:${sourceBranches}] into target [${targetAuthor}/${targetRepository}:${targetBranches}]..."
+ checkout(resolveRepository(targetRepository, targetAuthor, targetBranches, false, credentialId))
+ setUserConfigFromCreds(credentialId)
+ def targetCommit = getCommit()
+
+ try {
+ // user:password form is required for an authenticated pull over https
+ withCredentials([usernameColonPassword(credentialsId: credentialId, variable: 'kieCiUserPassword')]) {
+ sh "git pull https://${kieCiUserPassword}@github.com/${sourceAuthor}/${sourceRepository} ${sourceBranches}"
+ }
+ } catch (Exception e) {
+ println """
+ -------------------------------------------------------------
+ [ERROR] Can't merge source into Target. Please rebase PR branch.
+ -------------------------------------------------------------
+ Source: git://github.com/${sourceAuthor}/${sourceRepository} ${sourceBranches}
+ Target: ${targetCommit}
+ -------------------------------------------------------------
+ """
+ throw e
+ }
+ def mergedCommit = getCommit()
+
+ println """
+ -------------------------------------------------------------
+ [INFO] Source merged into Target
+ -------------------------------------------------------------
+ Target: ${targetCommit}
+ Produced: ${mergedCommit}
+ -------------------------------------------------------------
+ """
+}
+
+// Create and switch to a new local branch (`git checkout -b`).
+def createBranch(String branchName) {
+ try {
+ sh "git checkout -b ${branchName}"
+ } catch (Exception e) {
+ println "[ERROR] Can't create branch ${branchName} on repo."
+ throw e
+ }
+ println "[INFO] Created branch '${branchName}' on repo."
+}
+
+// True when `remote/branch` resolves after fetching the remote.
+boolean isBranchExist(String remote, String branch) {
+ sh "git fetch ${remote}"
+ return sh(returnStatus: true, script: "git rev-parse ${remote}/${branch}") == 0
+}
+
+/*
+* Remove a branch from the remote
+*
+* You need correct rights to delete the remote branch
+*
+* Will fail if the branch does not exist
+* (implemented as `git push --delete <remote> <branch>` via pushObject)
+*/
+def removeRemoteBranch(String remote, String branch, String credentialsId = 'kie-ci') {
+ pushObject("--delete ${remote}", "${branch}", credentialsId)
+ println "[INFO] Deleted remote branch ${branch}."
+}
+
+// Force-delete a local branch (`git branch -D`).
+void removeLocalBranch(String branch) {
+ sh "git branch -D ${branch}"
+ println "[INFO] Deleted branch ${branch}."
+}
+
+// Run the given staging closure, then commit with the given message.
+def commitChanges(String commitMessage, Closure preCommit) {
+ preCommit()
+ sh "git commit -m '${commitMessage}'"
+}
+
+// Commit the given pathspec (default '-u': all tracked modifications).
+def commitChanges(String commitMessage, String filesToAdd = '-u') {
+ commitChanges(commitMessage, { sh "git add ${filesToAdd}" })
+}
+
+// Register a new git remote.
+def addRemote(String remoteName, String remoteUrl) {
+ sh "git remote add ${remoteName} ${remoteUrl}"
+}
+
+// Squash all commits since the merge-base with baseBranch into one commit.
+def squashCommits(String baseBranch, String newCommitMsg) {
+ String branchName = sh(returnStdout: true, script: 'git rev-parse --abbrev-ref HEAD').trim()
+ String mergeName = sh(returnStdout: true, script: "git merge-base ${baseBranch} ${branchName}").trim()
+ sh "git reset ${mergeName}"
+ sh 'git add -A'
+ sh "git commit -m \"${newCommitMsg}\""
+}
+
+// Fork the current repository with the hub CLI; the fork becomes remote 'origin'.
+def forkRepo(String credentialID = 'kie-ci') {
+ cleanHubAuth()
+ withCredentials([usernamePassword(credentialsId: credentialID, usernameVariable: 'GITHUB_USER', passwordVariable: 'GITHUB_TOKEN')]) {
+ setUserConfig("${GITHUB_USER}")
+ sh 'git config hub.protocol https'
+ sh 'hub fork --remote-name=origin'
+ sh 'git remote -v'
+ }
+}
+
+// Open a pull request against targetBranch via the hub CLI; returns the PR URL.
+def createPR(String pullRequestTitle, String pullRequestBody = '', String targetBranch = 'main', String credentialID = 'kie-ci') {
+ def pullRequestLink
+ try {
+ pullRequestLink = executeHub("hub pull-request -m '${pullRequestTitle}' -m '${pullRequestBody}' -b '${targetBranch}'", credentialID)
+ } catch (Exception e) {
+ println "[ERROR] Unable to create PR. Please make sure the targetBranch ${targetBranch} is correct."
+ throw e
+ }
+ println "Please see the created PR at: ${pullRequestLink}"
+ return pullRequestLink
+}
+
+// Open a DRAFT pull request (hub `-d` flag) against targetBranch; returns the PR URL.
+def createPrAsDraft(String pullRequestTitle, String pullRequestBody = '', String targetBranch = 'main', String credentialID = 'kie-ci') {
+ def pullRequestLink
+ try {
+ pullRequestLink = executeHub("hub pull-request -d -m '${pullRequestTitle}' -m '${pullRequestBody}' -b '${targetBranch}'", credentialID)
+ } catch (Exception e) {
+ println "[ERROR] Unable to create Draft PR. Please make sure the targetBranch ${targetBranch} is correct."
+ throw e
+ }
+ println "Please see the created Draft PR at: ${pullRequestLink}"
+ return pullRequestLink
+}
+
+// Open a pull request with the given labels via the hub CLI; returns the PR URL.
+def createPRWithLabels(String pullRequestTitle, String pullRequestBody = '', String targetBranch = 'main', String[] labels, String credentialID = 'kie-ci') {
+ def labelsOption = labels.collect { label -> "'${label}'" }.join(',')
+ def pullRequestLink
+ try {
+ pullRequestLink = executeHub("hub pull-request -m '${pullRequestTitle}' -m '${pullRequestBody}' -b '${targetBranch}' -l ${labelsOption}", credentialID)
+ } catch (Exception e) {
+ println "[ERROR] Unable to create PR. Please make sure the targetBranch ${targetBranch} is correct."
+ throw e
+ }
+ println "Please see the created PR at: ${pullRequestLink}"
+ return pullRequestLink
+}
+
+// Run a hub CLI command with GitHub credentials bound; returns trimmed stdout.
+def executeHub(String hubCommand, String credentialID = 'kie-ci') {
+ cleanHubAuth()
+ withCredentials([usernamePassword(credentialsId: credentialID, usernameVariable: 'GITHUB_USER', passwordVariable: 'GITHUB_TOKEN')]) {
+ setUserConfig("${GITHUB_USER}")
+ return sh(returnStdout: true, script: hubCommand).trim()
+ }
+}
+
+// Merge the given pull request (by URL) with the hub CLI.
+def mergePR(String pullRequestLink, String credentialID = 'kie-ci') {
+ cleanHubAuth()
+ withCredentials([usernamePassword(credentialsId: credentialID, usernameVariable: 'GITHUB_USER', passwordVariable: 'GITHUB_TOKEN')]) {
+ try {
+ setUserConfig("${GITHUB_USER}")
+ sh "hub merge ${pullRequestLink}"
+ } catch (Exception e) {
+ println "[ERROR] Can't merge PR ${pullRequestLink} on repo."
+ throw e
+ }
+ println "[INFO] Merged PR '${pullRequestLink}' on repo."
+ }
+}
+
+// Create an annotated tag on the current commit.
+// Optional: Pass in env.BUILD_TAG as buildTag in pipeline script
+// to trace back the build from which this tag came from.
+def tagRepository(String tagName, String buildTag = '') {
+ def currentCommit = getCommit()
+ def tagMessageEnding = buildTag ? " in build \"${buildTag}\"." : '.'
+ def tagMessage = "Tagged by Jenkins${tagMessageEnding}"
+ sh "git tag -a '${tagName}' -m '${tagMessage}'"
+ println """
+-------------------------------------------------------------
+[INFO] Tagged current repository
+-------------------------------------------------------------
+Commit: ${currentCommit}
+Tagger: ${env.GIT_COMMITTER_NAME} (${env.GIT_COMMITTER_EMAIL})
+Tag: ${tagName}
+Tag Message: ${tagMessage}
+-------------------------------------------------------------
+"""
+}
+
+/*
+* Push a tag to the remote
+*
+* You need correct rights to create the tag
+* (implemented as `git push <remote> --tags <tagName>` via pushObject)
+*/
+
+def pushRemoteTag(String remote, String tagName, String credentialsId = 'kie-ci') {
+ pushObject(remote, "--tags ${tagName}", credentialsId)
+ println "[INFO] Pushed remote tag ${tagName}."
+}
+
+// True when tagName resolves after fetching the remote's tags.
+boolean isTagExist(String remote, String tagName) {
+ sh "git fetch ${remote} --tags"
+ return sh(returnStatus: true, script: "git rev-parse ${tagName}") == 0
+}
+
+// Delete a local tag (`git tag -d`).
+void removeLocalTag(String tagName) {
+ sh "git tag -d ${tagName}"
+ println "[INFO] Deleted tag ${tagName}."
+}
+
+/*
+* Remove a tag from the remote
+*
+* You need correct rights to delete the remote tag
+*
+* Will fail if the tag does not exist
+* (implemented as `git push --delete <remote> <tagName>` via pushObject)
+*/
+
+def removeRemoteTag(String remote, String tagName, String credentialsId = 'kie-ci') {
+ pushObject("--delete ${remote}", "${tagName}", credentialsId)
+ println "[INFO] Deleted remote tag ${tagName}."
+}
+
+/*
+* Creates a new release on GitHub
+* (gh CLI; title equals the tag name, notes from `description`)
+*/
+void createRelease(String tagName, String buildBranch, String description = "Release ${tagName}", String credentialsId = 'kie-ci') {
+ withCredentials([usernamePassword(credentialsId: "${credentialsId}", usernameVariable: 'GH_USER', passwordVariable: 'GH_TOKEN')]) {
+ sh "gh release create ${tagName} --target ${buildBranch} --title ${tagName} --notes \"${description}\""
+ }
+}
+
+/*
+* Creates a new release on GitHub with release notes
+* (releaseNotes is a FILE PATH passed to gh `-F`)
+*/
+void createReleaseWithReleaseNotes(String tagName, String buildBranch, String releaseNotes = 'Release Notes', String credentialsId = 'kie-ci') {
+ withCredentials([usernamePassword(credentialsId: "${credentialsId}", usernameVariable: 'GH_USER', passwordVariable: 'GH_TOKEN')]) {
+ sh "gh release create ${tagName} --target ${buildBranch} --title ${tagName} -F ${releaseNotes}"
+ }
+}
+
+/*
+* Creates a new release on GitHub with GH generated release notes
+* (notes generated from commits since `previousTag`)
+*/
+void createReleaseWithGeneratedReleaseNotes(String tagName, String buildBranch, String previousTag, String credentialsId = 'kie-ci') {
+ withCredentials([usernamePassword(credentialsId: "${credentialsId}", usernameVariable: 'GH_USER', passwordVariable: 'GH_TOKEN')]) {
+ sh "gh release create ${tagName} --target ${buildBranch} --title ${tagName} --generate-notes --notes-start-tag ${previousTag}"
+ }
+}
+
+/*
+* Removes a release on GitHub (the git tag itself is kept)
+*/
+void deleteRelease(String tagName, String credentialsId = 'kie-ci') {
+ withCredentials([usernamePassword(credentialsId: "${credentialsId}", usernameVariable: 'GH_USER', passwordVariable: 'GH_TOKEN')]) {
+ sh "gh release delete ${tagName} -y"
+ }
+}
+
+/*
+* Removes a release and its tag on GitHub (gh `--cleanup-tag`)
+*/
+void deleteReleaseAndTag(String tagName, String credentialsId = 'kie-ci') {
+ withCredentials([usernamePassword(credentialsId: "${credentialsId}", usernameVariable: 'GH_USER', passwordVariable: 'GH_TOKEN')]) {
+ sh "gh release delete --cleanup-tag ${tagName} -y"
+ }
+}
+
+/*
+* Checks whether a release exists on GitHub
+*
+* Returns true when `gh release view <tagName>` exits with status 0.
+*/
+boolean isReleaseExist(String tagName, String credentialsId = 'kie-ci') {
+ // Declared locally: the original assigned an undeclared `exist`, leaking a
+ // script-binding variable (breaks under strict/static compilation).
+ boolean exist = false
+ withCredentials([usernamePassword(credentialsId: "${credentialsId}", usernameVariable: 'GH_USER', passwordVariable: 'GH_TOKEN')]) {
+ // checks if the release is already existing
+ exist = sh(script: "gh release view ${tagName}", returnStatus: true) == 0
+ }
+ return exist
+}
+
+/*
+* Tag Local and remote repository
+*
+* You need correct rights to create or delete (in case of override) the tag.
+* With override=true an existing tag is first removed locally and remotely.
+*/
+
+def tagLocalAndRemoteRepository(String remote, String tagName, String credentialsId = 'kie-ci', String buildTag = '', boolean override = false) {
+ if (override && isTagExist(remote, tagName)) {
+ println "[INFO] Tag ${tagName} exists... Overriding it."
+ removeLocalTag(tagName)
+ removeRemoteTag(remote, tagName, credentialsId)
+ }
+
+ tagRepository(tagName, buildTag)
+ pushRemoteTag(remote, tagName, credentialsId)
+}
+
+// `git push <remote> <object>` with a temporary credential helper providing
+// the bound GitHub username/token (so the token never appears in the URL).
+def pushObject(String remote, String object, String credentialsId = 'kie-ci') {
+ try {
+ withCredentials([usernamePassword(credentialsId: "${credentialsId}", usernameVariable: 'GITHUB_USER', passwordVariable: 'GITHUB_TOKEN')]) {
+ setUserConfig("${GITHUB_USER}")
+ sh("git config --local credential.helper \"!f() { echo username=\\$GITHUB_USER; echo password=\\$GITHUB_TOKEN; }; f\"")
+ sh("git push ${remote} ${object}")
+ }
+ } catch (Exception e) {
+ println "[ERROR] Couldn't push object '${object}' to ${remote}."
+ throw e
+ }
+ println "[INFO] Pushed object '${object}' to ${remote}."
+}
+
+// Set git user.name/user.email (email is <username>@<domain>); global=true uses --global.
+def setUserConfig(String username, String domain = 'jenkins.kie.apache.org', boolean global=false) {
+ sh "git config ${(global?'--global ':'')}user.email ${username}@${domain}"
+ sh "git config ${(global?'--global ':'')}user.name ${username}"
+}
+
+// Set git user config from the username stored in the given credential.
+def setUserConfigFromCreds(String credentialsId = 'kie-ci') {
+ withCredentials([usernamePassword(credentialsId: "${credentialsId}", usernameVariable: 'GITHUB_USER', passwordVariable: 'GITHUB_TOKEN')]) {
+ setUserConfig("${GITHUB_USER}")
+ }
+}
+
+// One-line description (short hash + subject) of HEAD.
+def getCommit() {
+ return sh(returnStdout: true, script: 'git log --oneline -1').trim()
+}
+
+// Full SHA of HEAD.
+def getCommitHash() {
+ return sh(returnStdout: true, script: 'git rev-parse HEAD').trim()
+}
+
+// Commit SHA the given tag points to (dereferences annotated tags).
+String getTagCommitHash(String tagName) {
+ return sh(returnStdout: true, script: "git rev-list -n 1 ${tagName}").trim()
+}
+
+/*
+* Retrieve the Git repository URL from current dir (remote.origin.url)
+*/
+def getGitRepositoryURL() {
+ return sh(returnStdout: true, script: 'git config --get remote.origin.url | head -n 1').trim()
+}
+
+// Repository name derived from the origin URL basename, '.git' stripped.
+def getGitRepositoryName() {
+ return sh(returnStdout: true, script: "basename ${getGitRepositoryURL()} | sed 's|\\.git||g'").trim()
+}
+
+// Owner/organisation segment of the origin URL (text between github.com and the repo name).
+def getGitRepositoryAuthor() {
+ return sh(returnStdout: true, script: "echo ${getGitRepositoryURL()} | sed 's|/${getGitRepositoryName()}.*||g' | sed 's|.*github.com.\\?||g'").trim()
+}
+
+// All branches (local and remote) containing HEAD — raw `git branch` output, may be multi-line.
+def getBranch() {
+ return sh(returnStdout: true, script: 'git branch --all --contains HEAD').trim()
+}
+
+// Read a `remote.<name>.<config>` value from git config (e.g. 'url', 'fetch').
+def getRemoteInfo(String remoteName, String configName) {
+ return sh(returnStdout: true, script: "git config --get remote.${remoteName}.${configName}").trim()
+}
+
+// True when an open PR for `branch` exists from the author's fork OR from the group itself.
+def hasPullRequest(String group, String repository, String author, String branch, String credentialsId = 'kie-ci1-token') {
+ return hasForkPullRequest(group, repository, author, branch, credentialsId) || hasOriginPullRequest(group, repository, branch, credentialsId)
+}
+
+// True when an open PR exists whose head branch lives in the group's own repository.
+def hasOriginPullRequest(String group, String repository, String branch, String credentialsId = 'kie-ci1-token') {
+ return hasForkPullRequest(group, repository, group, branch, credentialsId)
+}
+
+// Query the GitHub API for open PRs with head `<author>:<branch>` against group/repository.
+// Returns true when at least one is found; an empty API response yields false.
+def hasForkPullRequest(String group, String repository, String author, String branch, String credentialsId = 'kie-ci1-token') {
+ def result = false
+ withCredentials([string(credentialsId: credentialsId, variable: 'OAUTHTOKEN')]) {
+ def curlResult = sh(returnStdout: true, script: "curl --globoff -H \"Authorization: token ${OAUTHTOKEN}\" 'https://api.github.com/repos/${group}/${repository}/pulls?head=${author}:${branch}&state=open'")?.trim()
+ if (curlResult) {
+ def pullRequestJsonObject = readJSON text: curlResult
+ result = pullRequestJsonObject.size() > 0
+ }
+ }
+ println "[INFO] has pull request for ${group}/${repository}:${author}:${branch} -> ${result}"
+ return result
+}
+
+// Find the name of `owner`'s fork of group/repository by paging through the
+// GitHub forks API (perPage results per call). Returns the fork's name, the
+// upstream name when owner == group, or null when no fork is found.
+// On malformed API responses the same page is retried up to `replays` times.
+def getForkedProjectName(String group, String repository, String owner, String credentialsId = 'kie-ci1-token', int page = 1, int perPage = 100, replays = 3) {
+ if (group == owner) {
+ return repository
+ }
+ def result = null
+ withCredentials([string(credentialsId: credentialsId, variable: 'OAUTHTOKEN')]) {
+ def forkedProjects = null
+
+ def curlResult = sh(returnStdout: true, script: "curl -H \"Authorization: token ${OAUTHTOKEN}\" 'https://api.github.com/repos/${group}/${repository}/forks?per_page=${perPage}&page=${page}'")?.trim()
+ if (curlResult) {
+ forkedProjects = readJSON text: curlResult
+ }
+ if (result == null && forkedProjects != null && forkedProjects.size() > 0) {
+ try {
+ def forkedProject = forkedProjects.find { it.owner.login == owner }
+ // Propagate `replays` so the retry budget is not reset on each page
+ result = forkedProject ? forkedProject.name : getForkedProjectName(group, repository, owner, credentialsId, ++page, perPage, replays)
+ } catch (MissingPropertyException e) {
+ if (--replays <= 0) {
+ throw new Exception("Error getting forked project name for ${group}/${repository}/forks?per_page=${perPage}&page=${page}. Communication error, please relaunch job.")
+ } else {
+ println("[ERROR] Getting forked project name for ${group}/${repository}/forks?per_page=${perPage}&page=${page}. Replaying... [${replays}]")
+ result = getForkedProjectName(group, repository, owner, credentialsId, page, perPage, replays)
+ }
+ }
+ }
+ }
+ return result
+}
+
+// Remove any cached hub CLI authentication so fresh credentials are used.
+def cleanHubAuth() {
+ sh 'rm -rf ~/.config/hub'
+}
+
+// Remove all untracked and ignored files from the working tree (`git clean -xdf`).
+def cleanWorkingTree() {
+ sh 'git clean -xdf'
+}
+
+/**
+ * Uses `find` command to stage all files matching the pattern and which are not in .gitignore
+ * (each candidate is filtered through `git check-ignore` before `git add`)
+ */
+def findAndStageNotIgnoredFiles(String findNamePattern) {
+ // based on https://stackoverflow.com/a/59888964/8811872
+ sh """
+ find . -type f -name '${findNamePattern}' > found_files.txt
+ files_to_add=""
+ while IFS= read -r file; do
+ if ! git check-ignore -q "\$file"; then
+ files_to_add="\$files_to_add \$file"
+ fi
+ done < found_files.txt
+ rm found_files.txt
+ if [ ! -z "\$files_to_add" ]; then
+ git add \$files_to_add
+ fi
+ git status
+ """
+}
+
+// True when the working tree has staged or unstaged changes (porcelain output non-empty).
+boolean isThereAnyChanges() {
+ return sh(script: 'git status --porcelain', returnStdout: true).trim() != ''
+}
+
+// Rewrite a GitHub release body so kie-issues references become links to the
+// apache/incubator-kie-issues tracker, then update the release with gh.
+// NOTE(review): the `#!/bin/bash` line follows leading whitespace inside the
+// sh block, so it is not an effective shebang — confirm intended shell.
+def updateReleaseBody(String tagName, String credsId = 'kie-ci') {
+ String releaseNotesFile = 'release_notes'
+ withCredentials([usernamePassword(credentialsId: credsId, usernameVariable: 'GH_USER', passwordVariable: 'GH_TOKEN')]) {
+ sh "gh release view ${tagName} --json body --jq .body > ${releaseNotesFile}"
+
+ sh """
+ #!/bin/bash
+ sed -i -r 's|\\[((incubator-)?kie-issues[-#][0-9]*)\\](.*)|\\1\\3|g' ${releaseNotesFile}
+ sed -i -r 's|(incubator-)?kie-issues[-#]([0-9]*)(.*)|\\[kie-issues#\\2\\](https\\://github\\.com/apache/incubator-kie-issues/issues/\\2)\\3|g' ${releaseNotesFile}
+ """
+ sh "gh release edit ${tagName} -F ${releaseNotesFile}"
+ }
+}
+
+/*
+* DEPRECATED
+*
+* Should use `getLatestTag` method instead which is more flexible.
+* Returns the most recent tag by tagger date, skipping `ignoreTag` if it is the newest.
+*/
+@Deprecated
+def getPreviousTag(String ignoreTag) {
+ String latestTag = sh(returnStdout: true, script: 'git tag --sort=-taggerdate | head -n 1').trim()
+ if (latestTag == ignoreTag) {
+ latestTag = sh(returnStdout: true, script: 'git tag --sort=-taggerdate | head -n 2 | tail -n 1').trim()
+ }
+ echo "Got latestTag = ${latestTag}"
+ return latestTag
+}
+
+/*
+* Returns the most recent tag (by tagger date) matching the given prefix/suffix,
+* excluding any tag listed in `ignoreTags`. Empty string when nothing matches.
+*/
+def getLatestTag(String startsWith = '', String endsWith = '', List ignoreTags = []) {
+ // Assemble the grep filter pipeline first, then build the final shell command
+ def filters = ignoreTags.collect { tag -> " | grep -v '${tag}'" }
+ if (startsWith) {
+ filters << " | grep '^${startsWith}'"
+ }
+ if (endsWith) {
+ filters << " | grep '${endsWith}\$'"
+ }
+ String cmd = "git tag --sort=-taggerdate${filters.join('')} | head -n 1"
+ return sh(returnStdout: true, script: cmd).trim()
+}
+
+/*
+* Finds the tag immediately preceding the given X.Y.Z version, searching in order:
+* previous micro of the same minor, then latest tag of each previous minor,
+* then latest tag of each previous major. Returns '' when none is found.
+*
+* @param version version in X.Y.Z form (parsed via util.parseVersion)
+* @param startsWith optional tag prefix placed before the version in the grep pattern
+* @param endsWith optional tag suffix filter
+* @param filterOutGrep patterns to exclude via `grep -v`
+* @param debug when true, prints each search step
+*/
+def getPreviousTagFromVersion(String version, String startsWith = '', String endsWith = '', List filterOutGrep = [], boolean debug = false) {
+ if (debug) { println "getPreviousTagFromVersion for version = ${version}" }
+ String cmd = 'git tag --sort=-committerdate'
+ if (endsWith) {
+ cmd += " | grep '${endsWith}\$'"
+ }
+ if (filterOutGrep) {
+ cmd += " ${filterOutGrep.collect { "| grep -v '${it}'" }.join(' ')}"
+ }
+ Integer[] versionSplit = util.parseVersion(version)
+
+ // Runs the assembled command filtered on `tagToSearch`; `reverse` sorts descending
+ // so the highest matching tag is picked first
+ Closure searchTag = { tagToSearch, reverse ->
+ if (debug) { println "Searching tag ${tagToSearch}" }
+ String foundTag = sh(returnStdout: true, script: "${cmd} | grep '${tagToSearch}' | sort -V${reverse ? ' -r' : ''}")?.trim()
+ if (debug) { println "Found tag ${foundTag}" }
+ return foundTag ? foundTag.split('\n')[0] : ''
+ }
+
+ // Previous micro search
+ int micro = versionSplit[2]
+ while (micro-- > 0) {
+ String foundTag = searchTag("^${startsWith}${versionSplit[0]}.${versionSplit[1]}.${micro}", true)
+ if (foundTag) { return foundTag }
+ }
+
+ // Previous minor search
+ int minor = versionSplit[1]
+ while (minor-- > 0) {
+ String foundTag = searchTag("^${startsWith}${versionSplit[0]}.${minor}.", false)
+ if (foundTag) { return foundTag }
+ }
+
+ // Previous major search (different looking for)
+ int major = versionSplit[0]
+ while (major-- > 0) {
+ String foundTag = searchTag("^${startsWith}${major}.", true)
+ if (foundTag) { return foundTag }
+ }
+
+ return ''
+}
+
+/*
+* Store in env the commit info needed to update the commit status
+*
+* Checks the repository out in a temporary directory (always deleted afterwards)
+* to capture its URL and HEAD sha via setCommitStatusRepoURLEnv/setCommitStatusShaEnv.
+*/
+void prepareCommitStatusInformation(String repository, String author, String branch, String credentialsId = 'kie-ci') {
+ dir("githubscm-prepare-commit-${repository}") {
+ try {
+ checkout(resolveRepository(repository, author, branch, false, credentialsId))
+ setCommitStatusRepoURLEnv(repository)
+ setCommitStatusShaEnv(repository)
+ } finally {
+ deleteDir()
+ }
+ }
+}
+
+/*
+* Store in env the commit info needed to update the commit status of a PR
+*
+* The sha comes from the source (author) branch checkout, while the repo URL
+* is overridden to point at the target repository owning the PR.
+*/
+void prepareCommitStatusInformationForPullRequest(String repository, String author, String branch, String targetAuthor, String credentialsId = 'kie-ci') {
+ prepareCommitStatusInformation(repository, author, branch, credentialsId)
+ setCommitStatusRepoURLEnv(repository, "https://github.com/${targetAuthor}/${repository}")
+}
+
+// Reads env var `<REPOSITORY>_COMMIT_STATUS_REPO_URL` (set by setCommitStatusRepoURLEnv)
+String getCommitStatusRepoURLEnv(String repository) {
+ return env."${repository.toUpperCase()}_COMMIT_STATUS_REPO_URL"
+}
+
+// Stores the repo URL in env var `<REPOSITORY>_COMMIT_STATUS_REPO_URL`; falls back
+// to the URL of the repository in the current directory when `url` is empty
+void setCommitStatusRepoURLEnv(String repository, String url = '') {
+ env."${repository.toUpperCase()}_COMMIT_STATUS_REPO_URL" = url ?: getGitRepositoryURL()
+}
+
+// Reads env var `<REPOSITORY>_COMMIT_STATUS_SHA` (set by setCommitStatusShaEnv)
+String getCommitStatusShaEnv(String repository) {
+ return env."${repository.toUpperCase()}_COMMIT_STATUS_SHA"
+}
+
+// Stores the commit sha in env var `<REPOSITORY>_COMMIT_STATUS_SHA`; falls back
+// to the HEAD of the repository in the current directory when `sha` is empty
+void setCommitStatusShaEnv(String repository, String sha = '') {
+ env."${repository.toUpperCase()}_COMMIT_STATUS_SHA" = sha ?: getCommitHash()
+}
+
+/*
+* UpdateGithubCommitStatus for the given repository
+*
+* (Run `prepareCommitStatusInformation` before if you need to set specific commit info before updating. Useful when working with `checkoutIfExists`)
+*
+* @params checkName Name of the check to appear into GH check status page
+* @params state State of the check: 'PENDING' / 'SUCCESS' / 'ERROR' / 'FAILURE'
+* @params message Message to display next to the check
+* @params repository Repository to update; guessed from the current directory when empty
+*/
+def updateGithubCommitStatus(String checkName, String state, String message, String repository = '') {
+ println "[INFO] Update commit status for check ${checkName}: state = ${state} and message = ${message}"
+
+ if (!repository) {
+ println '[INFO] No given repository... Trying to guess it from current directory'
+ repository = getGitRepositoryName()
+ }
+ println "[DEBUG] repository name = ${repository}"
+
+ // Lazily populate the commit info env vars if no prepare* call was made before
+ if (!getCommitStatusRepoURLEnv(repository) || !getCommitStatusShaEnv(repository)) {
+ println '[INFO] Commit status info are not stored, guessing from current repository'
+ setCommitStatusRepoURLEnv(repository)
+ setCommitStatusShaEnv(repository)
+ }
+ println "[DEBUG] repo url = ${getCommitStatusRepoURLEnv(repository)}"
+ println "[DEBUG] commit sha = ${getCommitStatusShaEnv(repository)}"
+
+ // Best-effort: a failure to set the status must not fail the build
+ try {
+ step([
+ $class: 'GitHubCommitStatusSetter',
+ commitShaSource: [$class: 'ManuallyEnteredShaSource', sha: getCommitStatusShaEnv(repository)],
+ contextSource: [$class: 'ManuallyEnteredCommitContextSource', context: checkName],
+ reposSource: [$class: 'ManuallyEnteredRepositorySource', url: getCommitStatusRepoURLEnv(repository)],
+ statusResultSource: [ $class: 'ConditionalStatusResultSource', results: [[$class: 'AnyBuildResult', message: message, state: state]] ],
+ ])
+ } catch(err) {
+ println "Error updating commit status: ${err}"
+ }
+}
+
+/*
+* Updates the GH commit status for `checkName` based on the current build result,
+* appending test counts and job duration to the status message.
+*/
+def updateGithubCommitStatusFromBuildResult(String checkName) {
+ println "[INFO] Update commit status for check ${checkName} from build result"
+ String buildResult = currentBuild.currentResult
+ println "[DEBUG] Got build result ${buildResult}"
+
+ def testResults = util.retrieveTestResults()
+ println "[DEBUG] Got test results ${testResults}"
+ String testsInfo = testResults ? "${testResults.passCount + testResults.skipCount + testResults.failCount} tests run, ${testResults.failCount} failed, ${testResults.skipCount} skipped." : 'No test results found.'
+
+ int jobDuration = util.getJobDurationInSeconds()
+ println "[DEBUG] Got job duration ${jobDuration} seconds"
+ String timeInfo = util.displayDurationFromSeconds(jobDuration)
+
+ // Map each Jenkins build result onto [commit status state, message prefix];
+ // anything unknown is reported as a pipeline issue
+ Map resultMapping = [
+ 'SUCCESS' : ['SUCCESS', 'Check is successful.'],
+ 'UNSTABLE': ['FAILURE', 'Test failures occurred.'],
+ 'ABORTED' : ['ERROR', 'Job aborted.'],
+ ]
+ def (String state, String text) = (resultMapping[buildResult] ?: ['ERROR', 'Issue in pipeline.'])
+ updateGithubCommitStatus(checkName, state, "(${timeInfo}) ${text} ${testsInfo}".trim())
+}
diff --git a/jenkins-pipeline-shared-libraries/vars/mailer.groovy b/jenkins-pipeline-shared-libraries/vars/mailer.groovy
new file mode 100644
index 000000000..9d1133b75
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/vars/mailer.groovy
@@ -0,0 +1,68 @@
+/*
+* Sends a build-failure email to developers and the build requester.
+* Branch name comes from CHANGE_BRANCH (Github Branch Source) or ghprbSourceBranch (ghprb).
+*/
+def sendEmailFailure() {
+ def branch = env.CHANGE_BRANCH ?: env.ghprbSourceBranch
+ emailext (
+ subject: "Build $branch failed",
+ body: "Build $branch failed! For more information see $BUILD_URL",
+ recipientProviders: [[$class: 'DevelopersRecipientProvider'], [$class: 'RequesterRecipientProvider']])
+}
+
+/*
+* Sends the "PR failed" notification email (requires ghprb plugin variables).
+*
+* @param additionalSubject optional subject prefix; falls back to 'PR' when null or blank
+*/
+def sendEmail_failedPR(String additionalSubject = null ) {
+ emailext(
+ // fix: the former `trim() || trim() != null` condition was true for blank strings,
+ // producing subjects starting with ' #<id>'; Elvis on the trimmed value restores the 'PR' fallback
+ subject: "${additionalSubject?.trim() ?: 'PR'} #$ghprbPullId of $ghprbGhRepository: $ghprbPullTitle failed",
+ body: """
+ Pull request #$ghprbPullId of $ghprbGhRepository: $ghprbPullTitle FAILED
+ Build log: ${BUILD_URL}consoleText
+ Failed tests \${TEST_COUNTS,var=\"fail\"}: ${BUILD_URL}testReport
+ (IMPORTANT: For visiting the links you need to have access to Red Hat VPN. In case you don\'t have access to RedHat VPN please download and decompress attached file.)
+ """,
+ attachmentsPattern: 'error.log.gz',
+ recipientProviders: [[$class: 'DevelopersRecipientProvider'], [$class: 'RequesterRecipientProvider']])
+}
+
+/*
+* Sends the "PR unstable" notification email, including the failed tests summary
+* (requires ghprb plugin variables).
+*
+* @param additionalSubject optional subject prefix; falls back to 'PR' when null or blank
+*/
+def sendEmail_unstablePR(String additionalSubject = null ) {
+ emailext(
+ // fix: blank additionalSubject used to win over the 'PR' fallback (`'' != null` is true)
+ subject: "${additionalSubject?.trim() ?: 'PR'} #$ghprbPullId of $ghprbGhRepository: $ghprbPullTitle was unstable",
+ body: """
+ Pull request #$ghprbPullId of $ghprbGhRepository: $ghprbPullTitle was UNSTABLE
+ Build log: ${BUILD_URL}consoleText
+ Failed tests \${TEST_COUNTS,var=\"fail\"}: ${BUILD_URL}testReport
+ (IMPORTANT: For visiting the links you need to have access to Red Hat VPN)
+ ***********************************************************************************************************************************************************
+ \${FAILED_TESTS}
+ """,
+ recipientProviders: [[$class: 'DevelopersRecipientProvider'], [$class: 'RequesterRecipientProvider']])
+}
+
+/*
+* Sends the "PR fixed" notification email (requires ghprb plugin variables).
+*
+* @param additionalSubject optional subject prefix; falls back to 'PR' when null or blank
+*/
+def sendEmail_fixedPR(String additionalSubject = null ) {
+ emailext(
+ // fix: blank additionalSubject used to win over the 'PR' fallback (`'' != null` is true)
+ subject: "${additionalSubject?.trim() ?: 'PR'} #$ghprbPullId of $ghprbGhRepository: $ghprbPullTitle is fixed and was SUCCESSFUL",
+ body: '',
+ recipientProviders: [[$class: 'DevelopersRecipientProvider'], [$class: 'RequesterRecipientProvider']])
+}
+
+/*
+* Sends the "PR aborted" notification email (requires ghprb plugin variables).
+*
+* @param additionalSubject optional subject prefix; falls back to 'PR' when null or blank
+*/
+def sendEmail_abortedPR(String additionalSubject = null ) {
+ emailext(
+ // fix: blank additionalSubject used to win over the 'PR' fallback (`'' != null` is true)
+ subject: "${additionalSubject?.trim() ?: 'PR'} #$ghprbPullId of $ghprbGhRepository: $ghprbPullTitle was ABORTED",
+ body: '',
+ recipientProviders: [[$class: 'DevelopersRecipientProvider'], [$class: 'RequesterRecipientProvider']])
+}
+
+/*
+* Writes a `trace.sh` helper in the workspace that, when executed, downloads the
+* build console log, keeps its last 750 lines in error.log and gzips it
+* (used as the `error.log.gz` attachment of sendEmail_failedPR).
+*/
+def buildLogScriptPR () {
+ dir("$WORKSPACE") {
+ sh 'touch trace.sh'
+ sh 'chmod 755 trace.sh'
+ sh 'echo "wget --no-check-certificate ${BUILD_URL}consoleText" >> trace.sh'
+ sh 'echo "tail -n 750 consoleText >> error.log" >> trace.sh'
+ sh 'echo "gzip error.log" >> trace.sh'
+ }
+}
+
+/*
+* Emails a markdown test summary (built by util.getMarkdownTestSummary) to the recipients.
+*
+* @param jobId appended to the subject when set
+* @param recipients joined with ',' for the `to` field
+* NOTE(review): jobId is only used in the subject, not forwarded to getMarkdownTestSummary —
+* presumably intentional since the subject already carries it; verify against callers
+*/
+void sendMarkdownTestSummaryNotification(String jobId, String subject, List recipients, String additionalInfo = '', String buildUrl = "${BUILD_URL}") {
+ emailext subject: (jobId ? "${subject} - ${jobId}" : subject),
+ to: recipients.join(','),
+ body: util.getMarkdownTestSummary('', additionalInfo, buildUrl)
+}
+
+// Convenience overload without a jobId (plain subject)
+void sendMarkdownTestSummaryNotification(String subject, List recipients, String additionalInfo = '', String buildUrl = "${BUILD_URL}") {
+ sendMarkdownTestSummaryNotification('', subject, recipients, additionalInfo, buildUrl)
+}
diff --git a/jenkins-pipeline-shared-libraries/vars/maven.groovy b/jenkins-pipeline-shared-libraries/vars/maven.groovy
new file mode 100644
index 000000000..1ac1f77de
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/vars/maven.groovy
@@ -0,0 +1,196 @@
+import java.util.Properties
+import org.kie.jenkins.MavenCommand
+
+/**
+ * Runs `mvn -B <goals>` through MavenCommand.
+ *
+ * @param goals space-separated Maven goals/phases, e.g. "clean install"
+ * @param options extra CLI options inserted before the goals
+ * @param properties -D properties appended to the command
+ * @param logFileName when set, the output is also captured into this log file
+ */
+def runMaven(String goals, List options = [], Properties properties = null, String logFileName = null) {
+ new MavenCommand(this)
+ .withOptions(options)
+ .withProperties(properties)
+ .withLogFileName(logFileName)
+ .run(goals)
+}
+
+/**
+ * Runs `mvn -B <goals>` with test execution toggled via -DskipTests.
+ *
+ * @param skipTests value passed as -DskipTests
+ */
+def runMaven(String goals, boolean skipTests, List options = [], String logFileName = null) {
+ new MavenCommand(this)
+ .withOptions(options)
+ .skipTests(skipTests)
+ .withLogFileName(logFileName)
+ .run(goals)
+}
+
+/**
+ * Runs Maven (`-fae`) with a settings.xml provided by the Config File Provider plugin.
+ *
+ * @param settingsXmlId Jenkins config-file id of the settings.xml
+ */
+def runMavenWithSettings(String settingsXmlId, String goals, Properties properties, String logFileName = null) {
+ configFileProvider([configFile(fileId: settingsXmlId, variable: 'MAVEN_SETTINGS_XML')]) {
+ new MavenCommand(this, ['-fae'])
+ .withSettingsXmlFile(MAVEN_SETTINGS_XML)
+ .withProperties(properties)
+ .withLogFileName(logFileName)
+ .run(goals)
+ }
+}
+
+/**
+ * Runs Maven (`-fae`) with a provided settings.xml, toggling tests via -DskipTests.
+ */
+def runMavenWithSettings(String settingsXmlId, String goals, boolean skipTests, String logFileName = null) {
+ configFileProvider([configFile(fileId: settingsXmlId, variable: 'MAVEN_SETTINGS_XML')]) {
+ new MavenCommand(this, ['-fae'])
+ .withSettingsXmlFile(MAVEN_SETTINGS_XML)
+ .skipTests(skipTests)
+ .withLogFileName(logFileName)
+ .run(goals)
+ }
+}
+
+/**
+ * Runs Maven with a provided settings.xml and a SonarCloud token set as `sonar.login`.
+ *
+ * @param settingsXmlId settings.xml file id (Config File Provider plugin)
+ * @param goals maven goals
+ * @param sonarCloudId Jenkins string credential holding the SonarCloud token
+ * @param logFileName when set, the output is also captured into this log file
+ */
+def runMavenWithSettingsSonar(String settingsXmlId, String goals, String sonarCloudId, String logFileName = null) {
+ configFileProvider([configFile(fileId: settingsXmlId, variable: 'MAVEN_SETTINGS_XML')]) {
+ withCredentials([string(credentialsId: sonarCloudId, variable: 'TOKEN')]) {
+ new MavenCommand(this)
+ .withSettingsXmlFile(MAVEN_SETTINGS_XML)
+ .withProperty('sonar.login', "${TOKEN}")
+ .withLogFileName(logFileName)
+ .run(goals)
+ }
+ }
+}
+
+/** Sets the project version via versions:set using a fresh MavenCommand. */
+def mvnVersionsSet(String newVersion, boolean allowSnapshots = false) {
+ mvnVersionsSet(new MavenCommand(this), newVersion, allowSnapshots)
+}
+
+/**
+ * Runs versions:set on a clone of the given MavenCommand (non-recursive, no backup poms).
+ * The `full` flag property activates the corresponding project profile, if any.
+ */
+def mvnVersionsSet(MavenCommand mvnCmd, String newVersion, boolean allowSnapshots = false) {
+ mvnCmd.clone()
+ .withOptions(['-N', '-e'])
+ .withProperty('full')
+ .withProperty('newVersion', newVersion)
+ .withProperty('allowSnapshots', allowSnapshots)
+ .withProperty('generateBackupPoms', false)
+ .run('versions:set')
+}
+
+/** Updates the parent version via versions:update-parent using a fresh MavenCommand. */
+def mvnVersionsUpdateParent(String newVersion, boolean allowSnapshots = false) {
+ mvnVersionsUpdateParent(new MavenCommand(this), newVersion, allowSnapshots)
+}
+
+/**
+ * Runs versions:update-parent on a clone of the given MavenCommand.
+ * parentVersion is pinned with `[version]` range syntax so exactly that version is used.
+ */
+def mvnVersionsUpdateParent(MavenCommand mvnCmd, String newVersion, boolean allowSnapshots = false) {
+ mvnCmd.clone()
+ .withOptions(['-N', '-e'])
+ .withProperty('full')
+ .withProperty('parentVersion', "[${newVersion}]")
+ .withProperty('allowSnapshots', allowSnapshots)
+ .withProperty('generateBackupPoms', false)
+ .run('versions:update-parent')
+}
+
+/** Aligns child module versions via versions:update-child-modules using a fresh MavenCommand. */
+def mvnVersionsUpdateChildModules(boolean allowSnapshots = false) {
+ mvnVersionsUpdateChildModules(new MavenCommand(this), allowSnapshots)
+}
+
+/** Runs versions:update-child-modules on a clone of the given MavenCommand (non-recursive, no backup poms). */
+def mvnVersionsUpdateChildModules(MavenCommand mvnCmd, boolean allowSnapshots = false) {
+ mvnCmd.clone()
+ .withOptions(['-N', '-e'])
+ .withProperty('full')
+ .withProperty('allowSnapshots', allowSnapshots)
+ .withProperty('generateBackupPoms', false)
+ .run('versions:update-child-modules')
+}
+
+/** Updates the parent version then realigns child modules, using a fresh MavenCommand. */
+def mvnVersionsUpdateParentAndChildModules(String newVersion, boolean allowSnapshots = false) {
+ mvnVersionsUpdateParentAndChildModules(new MavenCommand(this), newVersion, allowSnapshots)
+}
+
+/** Updates the parent version then realigns child modules on the given MavenCommand. */
+def mvnVersionsUpdateParentAndChildModules(MavenCommand mvnCmd, String newVersion, boolean allowSnapshots = false) {
+ mvnVersionsUpdateParent(mvnCmd, newVersion, allowSnapshots)
+ mvnVersionsUpdateChildModules(mvnCmd, allowSnapshots)
+}
+
+/** Evaluates a pom property via help:evaluate using a fresh MavenCommand. */
+def mvnGetVersionProperty(String property, String pomFile = 'pom.xml') {
+ mvnGetVersionProperty(new MavenCommand(this), property, pomFile)
+}
+
+/**
+ * Runs help:evaluate quietly with forceStdout and returns the trimmed property value.
+ *
+ * @param property pom expression to evaluate (e.g. 'project.version')
+ */
+def mvnGetVersionProperty(MavenCommand mvnCmd, String property, String pomFile = 'pom.xml') {
+ mvnCmd.clone()
+ .withOptions(['-q', '-f', "${pomFile}"])
+ .withProperty('expression', property)
+ .withProperty('forceStdout')
+ .returnOutput()
+ .run('help:evaluate')
+ .trim()
+}
+
+/** Sets a version property via versions:set-property using a fresh MavenCommand. */
+def mvnSetVersionProperty(String property, String newVersion) {
+ mvnSetVersionProperty(new MavenCommand(this), property, newVersion)
+}
+
+/** Runs versions:set-property on a clone of the given MavenCommand (snapshots allowed, no backup poms). */
+def mvnSetVersionProperty(MavenCommand mvnCmd, String property, String newVersion) {
+ mvnCmd.clone()
+ .withOptions(['-e'])
+ .withProperty('property', property)
+ .withProperty('newVersion', newVersion)
+ .withProperty('allowSnapshots', true)
+ .withProperty('generateBackupPoms', false)
+ .run('versions:set-property')
+}
+
+/** Compares (and optionally aligns) dependencies against a remote pom using a fresh MavenCommand. */
+def mvnCompareDependencies(String remotePom, String project = '', boolean updateDependencies = false, boolean updatePropertyVersions = false) {
+ mvnCompareDependencies(new MavenCommand(this), remotePom, project, updateDependencies, updatePropertyVersions)
+}
+
+/**
+ * Runs versions:compare-dependencies against `remotePom` on a clone of the given MavenCommand.
+ *
+ * @param project when set, restricts the run to that module via `-pl`
+ * @param updateDependencies when true, dependency versions are rewritten to match the remote pom
+ * @param updatePropertyVersions when true, version properties are rewritten too
+ */
+def mvnCompareDependencies(MavenCommand mvnCmd, String remotePom, String project = '', boolean updateDependencies = false, boolean updatePropertyVersions=false) {
+ def newMvnCmd = mvnCmd.clone()
+ .withProperty('remotePom', remotePom)
+ .withProperty('updatePropertyVersions', updatePropertyVersions)
+ .withProperty('updateDependencies', updateDependencies)
+ .withProperty('generateBackupPoms', false)
+
+ if(project) {
+ newMvnCmd.withOptions(["-pl ${project}"])
+ }
+
+ newMvnCmd.run('versions:compare-dependencies')
+}
+
+/**
+ * Zips the content of `artifactDir` and uploads the archive to `repoUrl` with curl.
+ *
+ * @param mvnUploadCredsId usernameColonPassword credential used for the upload
+ * NOTE(review): `${kieUnpack}` is Groovy-interpolated into the sh command, so the secret
+ * appears in the process arguments — consider single-quoting the script and letting the
+ * shell expand the env var instead
+ */
+def uploadLocalArtifacts(String mvnUploadCredsId, String artifactDir, String repoUrl) {
+ def zipFileName = 'artifacts'
+ withCredentials([usernameColonPassword(credentialsId: mvnUploadCredsId, variable: 'kieUnpack')]) {
+ dir(artifactDir) {
+ sh "zip -r ${zipFileName} ."
+ sh "curl --silent --upload-file ${zipFileName}.zip -u ${kieUnpack} -v ${repoUrl}"
+ }
+ }
+}
+
+/** Returns the `<latest>` version from the repository's maven-metadata.xml, or null when absent. */
+def getLatestArtifactVersionFromRepository(String repositoryUrl, String groupId, String artifactId) {
+ return getMavenMetadata(repositoryUrl, groupId, artifactId).versioning?.latest?.text()
+}
+
+/**
+ * Returns the highest listed version starting with `versionPrefix` from maven-metadata.xml.
+ * NOTE(review): `max()` is lexicographic on strings, not semver-aware — confirm this is
+ * sufficient for the version schemes in use
+ */
+def getLatestArtifactVersionPrefixFromRepository(String repositoryUrl, String groupId, String artifactId, String versionPrefix) {
+ return getMavenMetadata(repositoryUrl, groupId, artifactId).versioning?.versions?.childNodes().collect{ it.text() }.findAll{ it.startsWith(versionPrefix) }.max()
+}
+
+/** Fetches and parses `<repo>/<group path>/<artifactId>/maven-metadata.xml` with XmlSlurper. */
+def getMavenMetadata(String repositoryUrl, String groupId, String artifactId) {
+ def groupIdArtifactId = "${groupId.replaceAll("\\.", "/")}/${artifactId}"
+ return new XmlSlurper().parse("${repositoryUrl}/${groupIdArtifactId}/maven-metadata.xml")
+}
+
+/**
+ * Extracts the pom file path passed to a Maven command via `-f`/`--file`,
+ * defaulting to 'pom.xml' when no such option is present.
+ *
+ * @param buildCmd the full Maven command line to inspect
+ * @return the pom file path used by the command
+ */
+String getProjectPomFromBuildCmd(String buildCmd) {
+ def pom = "pom.xml"
+ def fileOption = "-f"
+
+ def projectPom = "pom.xml"
+ // fix: declare the regex holders locally so they no longer leak into the script binding
+ def regexF = "-f[ =]"
+ def regexFile = "--file[ =]"
+ if (buildCmd =~ regexF || buildCmd =~ regexFile) {
+ // NOTE(review): assumes the pom filename ends with 'pom.xml' and follows the option — confirm for exotic command lines
+ projectPom = buildCmd.substring(buildCmd.indexOf(fileOption), buildCmd.indexOf(pom) + pom.length())
+ projectPom = projectPom.split("=| ")[1]
+ }
+ return projectPom
+}
+
+/*
+* Clean Maven repository on the node
+*
+* Deletes the whole local ~/.m2/repository to reclaim disk space between builds.
+*/
+void cleanRepository() {
+ sh 'rm -rf $HOME/.m2/repository'
+}
\ No newline at end of file
diff --git a/jenkins-pipeline-shared-libraries/vars/pullrequest.groovy b/jenkins-pipeline-shared-libraries/vars/pullrequest.groovy
new file mode 100644
index 000000000..ef2b111ce
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/vars/pullrequest.groovy
@@ -0,0 +1,33 @@
+/**
+* This method add a comment to current PR (for either ghprb or Github Branch Source plugin)
+*
+* @param commentText markdown body of the comment
+* @param githubTokenCredsId Jenkins string credential holding a GitHub API token
+*/
+void postComment(String commentText, String githubTokenCredsId = "kie-ci3-token") {
+ // fix: read the plugin variables through `env.` — bare references throw
+ // MissingPropertyException when the variable is unset, preventing the error() message
+ if (!env.CHANGE_ID && !env.ghprbPullId) {
+ error "Pull Request Id variable (ghprbPullId or CHANGE_ID) is not set. Are you sure you are running with Github Branch Source plugin or ghprb plugin?"
+ }
+ def changeId = env.CHANGE_ID ?: env.ghprbPullId
+ def changeRepository = env.CHANGE_ID ? getAuthorAndRepoForPr() : env.ghprbGhRepository
+ String filename = "${util.generateHash(10)}.build.summary"
+ def jsonComment = [
+ body : commentText
+ ]
+ // fix: restore the `${filename}` interpolations (the payload file written above
+ // is displayed, POSTed to the GitHub issues comments API, then removed)
+ writeJSON(json: jsonComment, file: filename)
+ sh "cat ${filename}"
+ withCredentials([string(credentialsId: githubTokenCredsId, variable: 'GITHUB_TOKEN')]) {
+ sh "curl -s -H \"Authorization: token ${GITHUB_TOKEN}\" -X POST -d '@${filename}' \"https://api.github.com/repos/${changeRepository}/issues/${changeId}/comments\""
+ }
+ sh "rm ${filename}"
+}
+
+/**
+* Returns 'owner/repository' for the current PR when running with the Github Branch Source plugin,
+* from CHANGE_FORK when present, else parsed out of CHANGE_URL.
+*/
+String getAuthorAndRepoForPr() {
+ if (!env.CHANGE_FORK && !env.CHANGE_URL) {
+ // fix: grammatical error message ("CHANGE_FORK neither CHANGE_URL")
+ error "Neither CHANGE_FORK nor CHANGE_URL variables are set. Are you sure you're running with Github Branch Source plugin?"
+ }
+ if (env.CHANGE_FORK) {
+ // NOTE(review): assumes CHANGE_FORK already holds 'owner/repository' — confirm, some setups provide only the owner
+ return env.CHANGE_FORK
+ }
+ // e.g. https://github.com/owner/repository/pull/123 -> owner/repository
+ String fullUrl = env.CHANGE_URL
+ String urlWithoutProtocol = fullUrl.split('://')[1]
+ String path = urlWithoutProtocol.substring(urlWithoutProtocol.indexOf('/'))
+ return path.substring(1, path.indexOf('/pull/'))
+}
\ No newline at end of file
diff --git a/jenkins-pipeline-shared-libraries/vars/util.groovy b/jenkins-pipeline-shared-libraries/vars/util.groovy
new file mode 100644
index 000000000..30e00587f
--- /dev/null
+++ b/jenkins-pipeline-shared-libraries/vars/util.groovy
@@ -0,0 +1,549 @@
+/**
+ * Extracts the 'group/name' part from a github project url.
+ * Supports http(s), git and ssh (git@...) forms; the url must end with '.git'
+ * (capture group 8 is the path between 'github.com/' and '.git').
+ *
+ * @param projectUrl the github project url
+ */
+def getProject(String projectUrl) {
+ return (projectUrl =~ /((git|ssh|http(s)?)|(git@[\w\.]+))(:(\/\/)?(github.com\/))([\w\.@\:\/\-~]+)(\.git)(\/)?/)[0][8]
+}
+
+/**
+ * Returns only the group (owner) part of a github project url.
+ *
+ * @param projectUrl the github project url
+ */
+def getGroup(String projectUrl) {
+ // Resolve [group, name] from the url, then keep only the group
+ def (group, name) = getProjectGroupName(getProject(projectUrl))
+ return group
+}
+
+/**
+ * Splits a project identifier into a two-element [group, name] list.
+ *
+ * @param project either "group/name" or a bare project name
+ * @param defaultGroup group used when `project` carries no group. Optional.
+ */
+def getProjectGroupName(String project, String defaultGroup = "apache") {
+ def parts = project.split("\\/")
+ if (parts.size() > 1) {
+ return [parts[0], parts[1]]
+ }
+ return [defaultGroup, project]
+}
+
+/**
+ * Returns the path to the project dir inside the workspace, as `<group>_<name>`.
+ *
+ * @param project either "group/name" or a bare project name
+ * @param defaultGroup group used when `project` carries no group
+ * @return "${WORKSPACE}/<group>_<name>"
+ */
+def getProjectDirPath(String project, String defaultGroup = "apache") {
+ def projectGroupName = getProjectGroupName(project, defaultGroup)
+ return "${env.WORKSPACE}/${projectGroupName[0]}_${projectGroupName[1]}"
+}
+
+/**
+ *
+ * Stores git information into an env variable to be retrievable at any point of the pipeline
+ *
+ * Appends ';'-separated `project=commit Branch [...] Remote [...]` entries to
+ * GIT_INFORMATION_REPORT and `project=hash` entries to GIT_INFORMATION_HASHES.
+ * ';' and '=' are stripped from values so both variables stay splittable.
+ *
+ * @param projectName to store commit
+ */
+def storeGitInformation(String projectName) {
+ def gitInformationReport = env.GIT_INFORMATION_REPORT ? "${env.GIT_INFORMATION_REPORT}; " : ""
+ gitInformationReport += "${projectName}=${githubscm.getCommit().replace(';', '').replace('=', '')} Branch [${githubscm.getBranch().replace(';', '').replace('=', '')}] Remote [${githubscm.getRemoteInfo('origin', 'url').replace(';', '').replace('=', '')}]"
+ env.GIT_INFORMATION_REPORT = gitInformationReport
+
+ def gitHashes = env.GIT_INFORMATION_HASHES ? "${env.GIT_INFORMATION_HASHES};" : ""
+ gitHashes += "${projectName}=${githubscm.getCommitHash()}"
+ env.GIT_INFORMATION_HASHES = gitHashes
+}
+
+/**
+ *
+ * prints GIT_INFORMATION_REPORT variable
+ *
+ * Parses the ';'-separated `key=value` entries written by storeGitInformation
+ * (values are sanitized there, so splitting on '=' is safe) and prints them as a report.
+ */
+def printGitInformationReport() {
+ if (env.GIT_INFORMATION_REPORT?.trim()) {
+ def result = env.GIT_INFORMATION_REPORT.split(';').inject([:]) { map, token ->
+ token.split('=').with { key, value ->
+ map[key.trim()] = value.trim()
+ }
+ map
+ }
+ def report = '''
+------------------------------------------
+GIT INFORMATION REPORT
+------------------------------------------
+'''
+ result.each { key, value ->
+ report += "${key}: ${value}\n"
+ }
+ println report
+ } else {
+ println '[WARNING] The variable GIT_INFORMATION_REPORT does not exist'
+ }
+}
+
+/*
+ * Get the next major/minor/micro version, with a specific suffix if needed.
+ * The version string needs to be in the form X.Y.Z
+ *
+ * @param type one of 'major' / 'minor' / 'micro' (asserted)
+ * @param suffix appended as '-<suffix>' when non-empty
+ * @param resetSubVersions when true, bumping major resets minor+micro, bumping minor resets micro
+ * @return the bumped version string, or null when parseVersion fails
+*/
+
+def getNextVersion(String version, String type, String suffix = 'SNAPSHOT', boolean resetSubVersions = true) {
+ assert ['major', 'minor', 'micro'].contains(type)
+ Integer[] versionSplit = parseVersion(version)
+ if (versionSplit != null) {
+ int majorVersion = versionSplit[0] + (type == 'major' ? 1 : 0)
+ int minorVersion = resetSubVersions && type == 'major' ? 0 : (versionSplit[1] + (type == 'minor' ? 1 : 0))
+ int microVersion = resetSubVersions && (type == 'major' || type == 'minor') ? 0 : (versionSplit[2] + (type == 'micro' ? 1 : 0))
+ return "${majorVersion}.${minorVersion}.${microVersion}${suffix ? '-' + suffix : ''}"
+ } else {
+ return null
+ }
+}
+
+/*
+* Reduces a version string to its 'Major.minor' prefix; rethrows (after logging)
+* when the string has fewer than two dot-separated parts.
+*/
+String getMajorMinorVersion(String version) {
+ try {
+ def parts = version.split("\\.")
+ return "${parts[0]}.${parts[1]}"
+ } catch (err) {
+ println "[ERROR] ${version} cannot be reduced to Major.minor"
+ throw err
+ }
+}
+
+/*
+ * It parses a version string, which needs to be in the format X.Y.Z or X.Y.Z.suffix and returns the 3 numbers
+ * in an array. The optional suffix must not be numeric.
+ *
+ * Valid version examples:
+ * 1.0.0
+ * 1.0.0.Final
+ *
+ * Calls the pipeline `error` step (aborting the build) when the format is invalid.
+*/
+
+Integer[] parseVersion(String version) {
+ String[] versionSplit = version.split("\\.")
+ // a 4th part is tolerated only when it is non-numeric (e.g. '.Final')
+ boolean hasNonNumericSuffix = versionSplit.length == 4 && !(versionSplit[3].isNumber())
+ if (versionSplit.length == 3 || hasNonNumericSuffix) {
+ if (versionSplit[0].isNumber() && versionSplit[1].isNumber() && versionSplit[2].isNumber()) {
+ Integer[] vs = new Integer[3]
+ vs[0] = Integer.parseInt(versionSplit[0])
+ vs[1] = Integer.parseInt(versionSplit[1])
+ vs[2] = Integer.parseInt(versionSplit[2])
+ return vs
+ } else {
+ error "Version ${version} is not in the required format. The major, minor, and micro parts should contain only numeric characters."
+ }
+ } else {
+ error "Version ${version} is not in the required format X.Y.Z or X.Y.Z.suffix."
+ }
+}
+
+// Maps a X.Y.Z version onto its release branch name 'X.Y.x'
+String getReleaseBranchFromVersion(String version) {
+ Integer[] versionSplit = parseVersion(version)
+ return "${versionSplit[0]}.${versionSplit[1]}.x"
+}
+
+/*
+* Shifts a release branch name ('X.Y.x' or 'X.Y.x-prod') by the given major/minor offsets.
+* When the input does not match that shape, it is returned unchanged (with a log message).
+*/
+String calculateTargetReleaseBranch(String currentReleaseBranch, int addToMajor = 0, int addToMinor = 0) {
+ String targetBranch = currentReleaseBranch
+ String [] versionSplit = targetBranch.split("\\.")
+ if (versionSplit.length == 3
+ && versionSplit[0].isNumber()
+ && versionSplit[1].isNumber()
+ && (versionSplit[2] == 'x' || versionSplit[2] == 'x-prod')) {
+ Integer newMajor = Integer.parseInt(versionSplit[0]) + addToMajor
+ Integer newMinor = Integer.parseInt(versionSplit[1]) + addToMinor
+ targetBranch = "${newMajor}.${newMinor}.${versionSplit[2]}"
+ } else {
+ println "Cannot parse targetBranch as release branch so going further with current value: ${targetBranch}"
+ }
+ return targetBranch
+}
+
+/**
+ * It prepares the environment to avoid problems with plugins. For example files from SCM pipeline are deleted during checkout
+ *
+ * Copies the whole workspace content into a '.ci-env' folder (skipped when it already exists).
+ */
+def prepareEnvironment() {
+ println """
+[INFO] Preparing Environment
+[INFO] Copying WORKSPACE content env folder
+ """
+ def envFolderName = '.ci-env'
+ if (fileExists("${env.WORKSPACE}/${envFolderName}")) {
+ println "[WARNING] folder ${env.WORKSPACE}/${envFolderName} already exist, won't create env folder again."
+ } else {
+ dir(env.WORKSPACE) {
+ sh "mkdir ${envFolderName}"
+ // copy everything (dotfiles included) except the env folder itself
+ sh "cp -r `ls -A | grep -v '${envFolderName}'` ${envFolderName}/"
+ }
+ }
+}
+
+/*
+* Generate a hash composed of alphanumeric characters (lowercase) of a given size
+*/
+
+String generateHash(int size) {
+ def pool = ('a'..'z') + ('0'..'9')
+ def random = new Random()
+ def builder = new StringBuilder()
+ size.times { builder.append(pool[random.nextInt(pool.size())]) }
+ return builder.toString()
+}
+
+// Creates a temporary file on the node via `mktemp` and returns its path
+String generateTempFile() {
+ return sh(returnStdout: true, script: 'mktemp').trim()
+}
+
+// Creates a temporary directory on the node via `mktemp -d` and returns its path
+String generateTempFolder() {
+ return sh(returnStdout: true, script: 'mktemp -d').trim()
+}
+
+/*
+* Runs the closure with credentials bound from the map: `token` binds QUAY_TOKEN,
+* `usernamePassword` binds QUAY_USER/QUAY_TOKEN. Fails the build when neither is given.
+* NOTE(review): the bound variable names are QUAY_-specific although the method name is generic — verify callers rely on these exact names
+*/
+void executeWithCredentialsMap(Map credentials, Closure closure) {
+ if (credentials.token) {
+ withCredentials([string(credentialsId: credentials.token, variable: 'QUAY_TOKEN')]) {
+ closure()
+ }
+ } else if (credentials.usernamePassword) {
+ withCredentials([usernamePassword(credentialsId: credentials.usernamePassword, usernameVariable: 'QUAY_USER', passwordVariable: 'QUAY_TOKEN')]) {
+ closure()
+ }
+ } else {
+ error 'No credentials given to execute the given closure'
+ }
+}
+
+/*
+* Cleans the node: workspace, local Maven repository and (when a container engine
+* is given) its containers and images.
+*/
+void cleanNode(String containerEngine = '') {
+ println '[INFO] Clean workspace'
+ cleanWs()
+ println '[INFO] Workspace cleaned'
+ println '[INFO] Cleanup Maven artifacts'
+ maven.cleanRepository()
+ println '[INFO] .m2/repository cleaned'
+ if (containerEngine) {
+ println "[INFO] Cleanup ${containerEngine} containers/images"
+ cloud.cleanContainersAndImages(containerEngine)
+ }
+}
+
+/*
+* Prints disk usage diagnostics for the node: overall df, /home/jenkins and the workspace tree.
+*/
+def spaceLeft() {
+ dir(env.WORKSPACE) {
+ println '[INFO] space left on the machine'
+ sh 'df -h'
+ println '[INFO] space of /home/jenkins'
+ sh "du -h -d1 /home/jenkins"
+ println '[INFO] space of workspace'
+ sh "du -h -d3 /home/jenkins/workspace"
+ }
+}
+
+/*
+* Runs `sed -i s/old/new/g` on every file matching `findPattern` under the current directory.
+* NOTE(review): arguments are interpolated raw into the sed expression — '/' or sed
+* metacharacters in them would break or alter the substitution; confirm callers pass safe values
+*/
+def replaceInAllFilesRecursive(String findPattern, String oldValueSedPattern, String newSedValue) {
+ sh "find . -name '${findPattern}' -type f -exec sed -i 's/${oldValueSedPattern}/${newSedValue}/g' {} \\;"
+}
+
+/*
+* Removes any partial downloaded dependencies from .m2 if the previous run was interrupted and no post actions were
+* executed (cleanRepository()) and a new build is executed on the same machine
+*
+* Matches Maven's `*.part` and `*.part.lock` partial-download markers.
+*/
+def rmPartialDeps(){
+ dir("${env.WORKSPACE}/.m2") {
+ sh "find . -regex \".*\\.part\\(\\.lock\\)?\" -exec rm -rf {} \\;"
+ }
+}
+
+// Downloads the build console log and returns its last `numberOfLines` lines
+String retrieveConsoleLog(int numberOfLines = 100, String buildUrl = "${BUILD_URL}") {
+ return sh(returnStdout: true, script: "wget --no-check-certificate -qO - ${buildUrl}consoleText | tail -n ${numberOfLines}")
+}
+
+/*
+* Writes the tail of a build's console log to '<id>_console.log' (or 'console.log')
+* and archives it as a build artifact.
+*/
+String archiveConsoleLog(String id = '', int numberOfLines = 100, String buildUrl = "${BUILD_URL}") {
+ String filename = "${id ? "${id}_" : ''}console.log"
+ // fix: restore the `${filename}` interpolation (remove any stale copy before writing)
+ sh "rm -rf ${filename}"
+ writeFile(text: retrieveConsoleLog(numberOfLines, buildUrl), file: filename)
+ archiveArtifacts(artifacts: filename)
+}
+
+// Fetches the build's test report as parsed JSON from the Jenkins testReport API
+def retrieveTestResults(String buildUrl = "${BUILD_URL}") {
+ return readJSON(text: sh(returnStdout: true, script: "wget --no-check-certificate -qO - ${buildUrl}testReport/api/json?depth=1"))
+}
+
+/*
+* Collects all non-passing test cases from a build's Jenkins test report.
+* Each returned map carries: status, name, packageName, className, enclosingBlockNames,
+* fullName, url (link to the testReport entry) and details/stacktrace.
+*/
+def retrieveFailedTests(String buildUrl = "${BUILD_URL}") {
+ def testResults = retrieveTestResults(buildUrl)
+
+ def allCases = []
+ testResults.suites?.each { testSuite ->
+ allCases.addAll(testSuite.cases)
+ }
+
+ def failedTests = []
+ testResults.suites?.each { testSuite ->
+ testSuite.cases?.each { testCase ->
+ if (!['PASSED', 'SKIPPED', 'FIXED'].contains(testCase.status)) {
+ def failedTest = [:]
+
+ boolean hasSameNameCases = allCases.findAll { it.name == testCase.name && it.className == testCase.className }.size() > 1
+
+ failedTest.status = testCase.status
+
+ // Retrieve class name
+ // fix: declare these locally; they previously leaked into the script binding
+ String fullClassName = testCase.className
+ // NOTE(review): assumes className always contains a '.' (a package) — a default-package class would throw here
+ int lastIndexOf = fullClassName.lastIndexOf('.')
+ String packageName = fullClassName.substring(0, lastIndexOf)
+ String className = fullClassName.substring(lastIndexOf + 1)
+
+ failedTest.name = testCase.name
+ failedTest.packageName = packageName
+ failedTest.className = className
+ failedTest.enclosingBlockNames = testSuite.enclosingBlockNames?.reverse()?.join(' / ')
+
+ failedTest.fullName = "${packageName}.${className}.${failedTest.name}"
+ // If other cases have the same className / name, Jenkins uses the enclosingBlockNames for the URL distinction
+ if (hasSameNameCases && testSuite.enclosingBlockNames) {
+ failedTest.fullName = "${testSuite.enclosingBlockNames.reverse().join(' / ')} / ${failedTest.fullName}"
+ }
+
+ // Construct test url
+ String urlLeaf = ''
+ // If other cases have the same className / name, Jenkins uses the enclosingBlockNames for the URL distinction
+ if (hasSameNameCases && testSuite.enclosingBlockNames) {
+ urlLeaf += testSuite.enclosingBlockNames.reverse().join('___')
+ }
+ urlLeaf += urlLeaf ? '___' : urlLeaf
+ urlLeaf += "${failedTest.name == "(?)" ? "___" : failedTest.name}/"
+ urlLeaf = urlLeaf.replaceAll(' ', '_')
+ .replaceAll('&', '_')
+ .replaceAll('-', '_')
+ failedTest.url = "${buildUrl}testReport/${packageName}/${className}/${urlLeaf}"
+
+ failedTest.details = [null, 'null'].contains(testCase.errorDetails) ? '' : testCase.errorDetails
+ failedTest.stacktrace = [null, 'null'].contains(testCase.errorStackTrace) ? '' : testCase.errorStackTrace
+ failedTests.add(failedTest)
+ }
+ }
+ }
+
+ return failedTests
+}
+
+/*
+* Returns the content of a build artifact, or '' when a HEAD request does not answer 200.
+*/
+String retrieveArtifact(String artifactPath, String buildUrl = "${BUILD_URL}") {
+ String finalUrl = "${buildUrl}artifact/${artifactPath}"
+ // curl -Iw '%{http_code}' performs a HEAD request and prints only the status code
+ String httpCode = sh(returnStdout: true, script: "curl -o /dev/null --silent -Iw '%{http_code}' ${finalUrl}")
+ return httpCode == "200" ? sh(returnStdout: true, script: "wget --no-check-certificate -qO - ${finalUrl}") : ''
+}
+
+// Fetches the build's top-level JSON (result, artifacts, ...) from the Jenkins API
+def retrieveJobInformation(String buildUrl = "${BUILD_URL}") {
+ return readJSON(text: sh(returnStdout: true, script: "wget --no-check-certificate -qO - ${buildUrl}api/json?depth=0"))
+}
+
+// True when the given Jenkins build result string is 'SUCCESS'
+boolean isJobResultSuccess(String jobResult) {
+ return jobResult == 'SUCCESS'
+}
+
+// True when the given Jenkins build result string is 'FAILURE'
+boolean isJobResultFailure(String jobResult) {
+ return jobResult == 'FAILURE'
+}
+
+// True when the given Jenkins build result string is 'ABORTED'
+boolean isJobResultAborted(String jobResult) {
+ return jobResult == 'ABORTED'
+}
+
+// True when the given Jenkins build result string is 'UNSTABLE'
+boolean isJobResultUnstable(String jobResult) {
+ return jobResult == 'UNSTABLE'
+}
+
+/*
+* Return the build/test summary of a job
+*
+* outputStyle possibilities: 'ZULIP' (default), 'GITHUB'
+*/
String getMarkdownTestSummary(String jobId = '', String additionalInfo = '', String buildUrl = "${BUILD_URL}", String outputStyle = 'ZULIP') {
    def jobInfo = retrieveJobInformation(buildUrl)

    // Check if any *_console.log is available as artifact first
    String defaultConsoleLogId = 'Console Logs'
    // Build a map of `log id` -> `log content` from the `*console.log` artifacts.
    // Filenames of the form `<id>_console.log` are keyed by their `<id>` prefix; a filename
    // without an underscore falls back to the default id. When no such artifact exists,
    // fall back to the last 50 lines of the job console output.
    Map consoleLogs = jobInfo.artifacts?.collect { it.fileName }
        .findAll { it.endsWith('console.log') }
        .collectEntries { filename ->
            int index = filename.lastIndexOf('_')
            String logId = index > 0 ? filename.substring(0, index) : defaultConsoleLogId
            return [ (logId) : retrieveArtifact(filename, buildUrl) ]
        } ?: [ (defaultConsoleLogId) : retrieveConsoleLog(50, buildUrl)]

    String jobResult = jobInfo.result
    // NOTE(review): BUILD_NUMBER is resolved from the *current* build's environment even when
    // buildUrl points at another build — confirm this is intended by callers.
    String summary = """
${jobId ? "**${jobId} job**" : 'Job'} ${formatBuildNumber(outputStyle, BUILD_NUMBER)} was: **${jobResult}**
"""

    if (!isJobResultSuccess(jobResult)) {
        summary += "Possible explanation: ${getResultExplanationMessage(jobResult)}\n"
    }

    if (additionalInfo) {
        summary += """
${additionalInfo}
"""
    }

    if (!isJobResultSuccess(jobResult)) {
        boolean testResultsFound = false
        summary += "\nPlease look here: ${buildUrl}display/redirect"

        // Test results may not exist (e.g. the build failed before tests ran): any error while
        // retrieving them is swallowed on purpose and we fall back to the console logs below.
        try {
            def testResults = retrieveTestResults(buildUrl)
            def failedTests = retrieveFailedTests(buildUrl)
            testResultsFound=true

            summary += """
\n**Test results:**
- PASSED: ${testResults.passCount}
- FAILED: ${testResults.failCount}
"""

            // GITHUB style inlines the failure details as HTML text;
            // ZULIP style wraps each failure in a `spoiler` block linking to its test report URL.
            summary += 'GITHUB'.equalsIgnoreCase(outputStyle) ? """
Those are the test failures: ${failedTests.size() <= 0 ? 'none' : '\n'}${failedTests.collect { failedTest ->
    return """
${failedTest.fullName}
${formatTextForHtmlDisplay(failedTest.details ?: failedTest.stacktrace)}
    """
}.join('\n')}
"""
            : """
Those are the test failures: ${failedTests.size() <= 0 ? 'none' : '\n'}${failedTests.collect { failedTest ->
    return """```spoiler [${failedTest.fullName}](${failedTest.url})
${failedTest.details ?: failedTest.stacktrace}
```"""
}.join('\n')}
"""
        } catch (err) {
            echo 'No test results found'
        }

        // Display console logs if no test results found
        // (an UNSTABLE result *with* test results is already explained by the failures above)
        if (!(jobResult == 'UNSTABLE' && testResultsFound)) {
            summary += 'GITHUB'.equalsIgnoreCase(outputStyle) ? """
See console log:
${consoleLogs.collect { key, value ->
return """
${key}
${formatTextForHtmlDisplay(value)}

"""
}.join('')}"""
            : """
See console log:
${consoleLogs.collect { key, value ->
return """```spoiler ${key}
${value}
```
"""
}.join('')}"""
        }
    }

    return summary
}
+
/**
 * Human-readable explanation for a given Jenkins job result.
 * @param jobResult job result string (SUCCESS, UNSTABLE, FAILURE, ABORTED, ...)
 * @return a short explanation message; a fallback message for unknown results
 */
String getResultExplanationMessage(String jobResult) {
    Map explanations = [
        'SUCCESS' : 'Do I need to explain ?',
        'UNSTABLE': 'This should be test failures',
        'FAILURE' : 'Pipeline failure or project build failure',
        'ABORTED' : 'Most probably a timeout, please review',
    ]
    return explanations.getOrDefault(jobResult, 'Woops ... I don\'t know about this result value ... Please ask maintainer.')
}
+
/**
 * Make multi-line text displayable in an HTML context by replacing newline
 * characters with HTML line breaks.
 * NOTE(review): the previous replacement argument was a single-quoted literal containing a
 * raw line break (invalid Groovy string syntax / at best a newline-for-newline no-op);
 * replaced with an explicit `<br/>` tag — confirm against the intended rendered output.
 * @param text the text to format
 * @return the text with every '\n' replaced by '<br/>'
 */
String formatTextForHtmlDisplay(String text) {
    return text.replaceAll('\n', '<br/>')
}
+
/**
 * Format a build number depending on the output style.
 * @param outputStyle 'GITHUB' (case-insensitive) or anything else (e.g. 'ZULIP')
 * @param buildNumber the build number to format
 * @return '`#N`' (markdown inline code) for GitHub, plain '#N' otherwise
 */
String formatBuildNumber(String outputStyle, String buildNumber) {
    if ('GITHUB'.equalsIgnoreCase(outputStyle)) {
        return "`#${buildNumber}`"
    }
    return "#${buildNumber}"
}
+
/**
 * URL-encode the provided string value in the provided encoding.
 * @param value string to encode
 * @param encoding character encoding to use [default UTF-8]
 * @return the URL-encoded string
 */
String encode(String value, String encoding='UTF-8') {
    // java.net is auto-imported by Groovy
    String encoded = URLEncoder.encode(value, encoding)
    return encoded
}
+
/**
 * Serialize the parameters converting a Map into an URL query string, like:
 * {A: 1, B: 2} --> 'A=1&B=2'
 * Values are URL-encoded; keys are kept as-is.
 * @param params key-value map representation of the parameters
 * @return URL query string
 */
String serializeQueryParams(Map params) {
    List queryParts = []
    params.each { key, value ->
        queryParts << "${key}=${encode(value as String)}"
    }
    return queryParts.join('&')
}
+
/**
 * Execute the provided closure within a Kerberos authentication context.
 * @param keytabId id of the Jenkins file credential holding the keytab to be used
 * @param closure code to run in the kerberos auth context
 * @param domain kerberos domain to look for in the keytab [default REDHAT.COM]
 * @param nRetries number of max retries to perform if kinit fails [default 5]
 */
def withKerberos(String keytabId, Closure closure, String domain = 'REDHAT.COM', int nRetries = 5) {
    withCredentials([file(credentialsId: keytabId, variable: 'KEYTAB_FILE')]) {
        // Extract the first principal matching the domain from the keytab
        env.KERBEROS_PRINCIPAL = sh(returnStdout: true, script: "klist -kt $KEYTAB_FILE |grep $domain | awk -F' ' 'NR==1{print \$4}' ").trim()

        if (!env.KERBEROS_PRINCIPAL?.trim()) {
            throw new Exception("[ERROR] found blank KERBEROS_PRINCIPAL, kerberos authentication failed.")
        }

        // check if kerberos authentication already exists with provided principal
        def currentPrincipal = sh(returnStdout: true, script: "klist | grep -i 'Default principal' | awk -F':' 'NR==1{print \$2}' ").trim()

        if (currentPrincipal != env.KERBEROS_PRINCIPAL) {
            def kerberosStatus = 0
            for (int i = 0; i < nRetries; i++) {
                kerberosStatus = sh(returnStatus: true, script: "kinit ${env.KERBEROS_PRINCIPAL} -kt $KEYTAB_FILE")
                if (kerberosStatus == 0) {
                    // exit at first success
                    break
                }
            }

            // if the kerberos status is still != 0 after nRetries throw exception
            if (kerberosStatus != 0) {
                throw new Exception("[ERROR] kinit failed with non-zero status.")
            }
        } else {
            println "[INFO] ${env.KERBEROS_PRINCIPAL} already authenticated, skipping kinit."
        }

        closure()
    }
}
+
/**
 * Run a shell command inside a Python virtualenv located under ~/virtenvs.
 * @param cmd the command to execute
 * @param virtualEnvName name of the virtualenv directory under ~/virtenvs
 * @param returnStdout whether to return the command's stdout [default false]
 * @return the command stdout when returnStdout is true
 */
def runWithPythonVirtualEnv(String cmd, String virtualEnvName, boolean returnStdout = false) {
    String wrappedScript = """
source ~/virtenvs/${virtualEnvName}/bin/activate
${cmd}
"""
    return sh(script: wrappedScript, returnStdout: returnStdout)
}
+
/**
 * Compute the elapsed time of the current job, from its start timestamp until now.
 * @return job duration in seconds
 */
int getJobDurationInSeconds() {
    long startMillis = retrieveJobInformation().timestamp
    long elapsedMillis = System.currentTimeMillis() - startMillis
    return (int) (elapsedMillis / 1000)
}
+
/**
 * Format a duration in seconds as a compact human-readable string, e.g. 3661 -> '1h1m1s'.
 * Hour/minute parts are printed only when the duration reaches them
 * (59 -> '59s', 60 -> '1m0s', 3600 -> '1h0m0s').
 * Uses intdiv()/% instead of Groovy's '/' operator, which produces a BigDecimal
 * for non-even integer division and relied on implicit narrowing to int.
 * @param durationInSec duration in seconds (expected >= 0)
 * @return the formatted duration string
 */
String displayDurationFromSeconds(int durationInSec) {
    int seconds = durationInSec % 60
    int totalMinutes = durationInSec.intdiv(60)
    int minutes = totalMinutes % 60
    int hours = totalMinutes.intdiv(60)

    String result = ''
    if (hours > 0) {
        result += "${hours}h"
    }
    if (totalMinutes > 0) {
        result += "${minutes}m"
    }
    result += "${seconds}s"
    return result
}