diff --git a/dist/pom.xml b/dist/pom.xml
index 7b2dd97f418..1c7b0505917 100644
--- a/dist/pom.xml
+++ b/dist/pom.xml
@@ -179,23 +179,6 @@
           </execution>
         </executions>
       </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>exec-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>if_modified_files</id>
-            <phase>verify</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-            <configuration>
-              <executable>bash</executable>
-              <commandlineArgs>-c 'export MODIFIED=$(git status --porcelain | grep "^ M"); [[ -z $MODIFIED ]] &amp;&amp; exit 0 || { echo -e "found modified files during mvn verify:\n$MODIFIED"; exit 1;}'</commandlineArgs>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.rat</groupId>
         <artifactId>apache-rat-plugin</artifactId>
@@ -208,4 +191,35 @@
       </plugin>
     </plugins>
   </build>
+
+  <profiles>
+    <profile>
+      <id>pre-merge</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>exec-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>if_modified_files</id>
+                <phase>verify</phase>
+                <goals>
+                  <goal>exec</goal>
+                </goals>
+                <configuration>
+                  <executable>bash</executable>
+                  <commandlineArgs>-c 'export MODIFIED=$(git status --porcelain | grep "^ M"); [[ -z $MODIFIED ]] &amp;&amp; exit 0 || { echo -e "found modified files during mvn verify:\n$MODIFIED"; exit 1;}'</commandlineArgs>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+    </profile>
+  </profiles>
+
 </project>
diff --git a/jenkins/spark-premerge-build.sh b/jenkins/spark-premerge-build.sh
index 456376149b9..9e66b97c770 100755
--- a/jenkins/spark-premerge-build.sh
+++ b/jenkins/spark-premerge-build.sh
@@ -37,7 +37,7 @@ export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"
 tar zxf $SPARK_HOME.tgz -C $ARTF_ROOT && \
     rm -f $SPARK_HOME.tgz
 
-mvn -U -B $MVN_URM_MIRROR '-P!snapshot-shims' clean verify -Dpytest.TEST_TAGS='' -Dpytest.TEST_TYPE="pre-commit" -Dpytest.TEST_PARALLEL=4
+mvn -U -B $MVN_URM_MIRROR '-P!snapshot-shims,pre-merge' clean verify -Dpytest.TEST_TAGS='' -Dpytest.TEST_TYPE="pre-commit" -Dpytest.TEST_PARALLEL=4
 # Run the unit tests for other Spark versions but dont run full python integration tests
 env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark301tests,snapshot-shims test -Dpytest.TEST_TAGS=''
 env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark311tests,snapshot-shims test -Dpytest.TEST_TAGS=''
diff --git a/tests/README.md b/tests/README.md
index 6a302349c55..06d090b21af 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -63,7 +63,7 @@ Apache Spark specific configurations can be passed in by setting the `SPARK_CONF`
 variable.
 
 Examples:
-- To run tests against Apache Spark 3.1.0,
+- To run tests against Apache Spark 3.1.1,
   `mvn -P spark311tests test`
 - To pass Apache Spark configs `--conf spark.dynamicAllocation.enabled=false --conf spark.task.cpus=1` do something like.
   `SPARK_CONF="spark.dynamicAllocation.enabled=false,spark.task.cpus=1" mvn ...`
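
To exercise the relocated check locally, a minimal sketch (assuming this patch is applied; the `pre-merge` profile name and bash one-liner come from the pom.xml hunk above, while the CI-only `$MVN_URM_MIRROR` mirror and pytest flags from the Jenkins script are omitted):

    # run verify with the pre-merge profile active so if_modified_files executes;
    # the build fails if `git status --porcelain` reports modified tracked files
    mvn '-P!snapshot-shims,pre-merge' clean verify

Because the profile declares `<activeByDefault>false</activeByDefault>`, a plain `mvn verify` now skips the modified-files check; only builds that opt in via `-P pre-merge`, such as the pre-merge CI job, run it.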