diff --git a/jenkins/spark-nightly-build.sh b/jenkins/spark-nightly-build.sh index 9ef541571031..c83856d3ff82 100755 --- a/jenkins/spark-nightly-build.sh +++ b/jenkins/spark-nightly-build.sh @@ -22,6 +22,9 @@ set -ex ## export 'M2DIR' so that shims can get the correct Spark dependency info export M2DIR="$WORKSPACE/.m2" +## maven options for building, e.g. '-Dspark-rapids-jni.version=xxx' to specify spark-rapids-jni dependency's version. +MVN_OPT=${MVN_OPT:-''} + TOOL_PL=${TOOL_PL:-"tools"} DIST_PL="dist" function mvnEval { @@ -64,7 +67,7 @@ function distWithReducedPom { ;; esac - mvn -B $mvnCmd $MVN_URM_MIRROR \ + mvn $MVN_OPT -B $mvnCmd $MVN_URM_MIRROR \ -Dcuda.version=$CUDA_CLASSIFIER \ -Dmaven.repo.local=$M2DIR \ -Dfile="${DIST_FPATH}.jar" \ @@ -76,9 +79,9 @@ function distWithReducedPom { } # build the Spark 2.x explain jar -mvn -B $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dbuildver=24X clean install -DskipTests +mvn $MVN_OPT -B $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dbuildver=24X clean install -DskipTests [[ $SKIP_DEPLOY != 'true' ]] && \ - mvn -B deploy $MVN_URM_MIRROR \ + mvn $MVN_OPT -B deploy $MVN_URM_MIRROR \ -Dmaven.repo.local=$M2DIR \ -DskipTests \ -Dbuildver=24X @@ -91,20 +94,20 @@ mvn -B $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dbuildver=24X clean install -D for buildver in "${SPARK_SHIM_VERSIONS[@]:1}"; do # temporarily skip tests on Spark 3.3.0 - https://github.com/NVIDIA/spark-rapids/issues/4031 [[ $buildver == "330" ]] && skipTestsFor330=true || skipTestsFor330=false - mvn -U -B clean install -pl '!tools' $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR \ + mvn $MVN_OPT -U -B clean install -pl '!tools' $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR \ -Dcuda.version=$CUDA_CLASSIFIER \ -Dbuildver="${buildver}" \ -DskipTests="${skipTestsFor330}" distWithReducedPom "install" [[ $SKIP_DEPLOY != 'true' ]] && \ - mvn -B deploy -pl '!tools,!dist' $MVN_URM_MIRROR \ + mvn $MVN_OPT -B deploy -pl '!tools,!dist' $MVN_URM_MIRROR \ -Dmaven.repo.local=$M2DIR \ 
-Dcuda.version=$CUDA_CLASSIFIER \ -DskipTests \ -Dbuildver="${buildver}" done -mvn -B clean install -pl '!tools' \ +mvn $MVN_OPT -B clean install -pl '!tools' \ $DIST_PROFILE_OPT \ -Dbuildver=$SPARK_BASE_SHIM_VERSION \ $MVN_URM_MIRROR \ @@ -118,7 +121,7 @@ if [[ $SKIP_DEPLOY != 'true' ]]; then distWithReducedPom "deploy" # this deploy includes 'tools' that is unconditionally built with Spark 3.1.1 - mvn -B deploy -pl '!dist' \ + mvn $MVN_OPT -B deploy -pl '!dist' \ -Dbuildver=$SPARK_BASE_SHIM_VERSION \ $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR \ -Dcuda.version=$CUDA_CLASSIFIER \ diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/Plugin.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/Plugin.scala index 21d8eb41f575..345702890e5c 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/Plugin.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/Plugin.scala @@ -313,11 +313,18 @@ object RapidsExecutorPlugin { * patch version then the actual patch version must be greater than or equal. * For example, version 7.1 is not satisfied by version 7.2, but version 7.1 is satisfied by * version 7.1.1. + * If the expected cudf version is a specified 'timestamp-seq' one, then it is satisfied by + * the SNAPSHOT version. + * For example, version 7.1-yyyymmdd.hhmmss-seq is satisfied by version 7.1-SNAPSHOT. 
 */ def cudfVersionSatisfied(expected: String, actual: String): Boolean = { val expHyphen = if (expected.indexOf('-') >= 0) expected.indexOf('-') else expected.length val actHyphen = if (actual.indexOf('-') >= 0) actual.indexOf('-') else actual.length - if (actual.substring(actHyphen) != expected.substring(expHyphen)) return false + if (actual.substring(actHyphen) != expected.substring(expHyphen) && + !(actual.substring(actHyphen) == "-SNAPSHOT" && + expected.substring(expHyphen).matches("-([0-9]{8})\\.([0-9]{6})-([1-9][0-9]*)"))) { + return false + } val (expMajorMinor, expPatch) = expected.substring(0, expHyphen).split('.').splitAt(2) val (actMajorMinor, actPatch) = actual.substring(0, actHyphen).split('.').splitAt(2)