diff --git a/docs/configs.md b/docs/configs.md index 52b2dd83a89..85ababd5425 100644 --- a/docs/configs.md +++ b/docs/configs.md @@ -10,7 +10,7 @@ The following is the list of options that `rapids-plugin-4-spark` supports. On startup use: `--conf [conf key]=[conf value]`. For example: ``` -${SPARK_HOME}/bin/spark --jars 'rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.17-SNAPSHOT-cuda10-1.jar' \ +${SPARK_HOME}/bin/spark --jars 'rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.18-SNAPSHOT-cuda10-1.jar' \ --conf spark.plugins=com.nvidia.spark.SQLPlugin \ --conf spark.rapids.sql.incompatibleOps.enabled=true ``` diff --git a/docs/get-started/Dockerfile.cuda b/docs/get-started/Dockerfile.cuda index 3ff455b3a78..d503b455456 100644 --- a/docs/get-started/Dockerfile.cuda +++ b/docs/get-started/Dockerfile.cuda @@ -53,7 +53,7 @@ COPY spark-3.0.1-bin-hadoop3.2/examples /opt/spark/examples COPY spark-3.0.1-bin-hadoop3.2/kubernetes/tests /opt/spark/tests COPY spark-3.0.1-bin-hadoop3.2/data /opt/spark/data -COPY cudf-0.17-SNAPSHOT-cuda10-1.jar /opt/sparkRapidsPlugin +COPY cudf-0.18-SNAPSHOT-cuda10-1.jar /opt/sparkRapidsPlugin COPY rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar /opt/sparkRapidsPlugin COPY getGpusResources.sh /opt/sparkRapidsPlugin diff --git a/docs/get-started/getting-started-on-prem.md b/docs/get-started/getting-started-on-prem.md index d7653e9320c..1936e5a65c8 100644 --- a/docs/get-started/getting-started-on-prem.md +++ b/docs/get-started/getting-started-on-prem.md @@ -55,7 +55,7 @@ CUDA and will not run on other versions. The jars use a maven classifier to keep - CUDA 11.0 => classifier cuda11 For example, here is a sample version of the jars and cudf with CUDA 10.1 support: -- cudf-0.17-SNAPSHOT-cuda10-1.jar +- cudf-0.18-SNAPSHOT-cuda10-1.jar - rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar @@ -63,7 +63,7 @@ For simplicity export the location to these jars. 
This example assumes the sample jars have been placed in the `/opt/sparkRapidsPlugin` directory: ```shell export SPARK_RAPIDS_DIR=/opt/sparkRapidsPlugin -export SPARK_CUDF_JAR=${SPARK_RAPIDS_DIR}/cudf-0.17-SNAPSHOT-cuda10-1.jar +export SPARK_CUDF_JAR=${SPARK_RAPIDS_DIR}/cudf-0.18-SNAPSHOT-cuda10-1.jar export SPARK_RAPIDS_PLUGIN_JAR=${SPARK_RAPIDS_DIR}/rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar ``` diff --git a/integration_tests/README.md b/integration_tests/README.md index 88087884a64..50eb0f6d112 100644 --- a/integration_tests/README.md +++ b/integration_tests/README.md @@ -128,7 +128,7 @@ Most clusters probably will not have the RAPIDS plugin installed in the cluster If you just want to verify the SQL replacement is working you will need to add the `rapids-4-spark` and `cudf` jars to your `spark-submit` command. ``` -$SPARK_HOME/bin/spark-submit --jars "rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.17-SNAPSHOT.jar" ./runtests.py +$SPARK_HOME/bin/spark-submit --jars "rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.18-SNAPSHOT.jar" ./runtests.py ``` You don't have to enable the plugin for this to work, the test framework will do that for you. 
@@ -180,7 +180,7 @@ The TPCxBB, TPCH, TPCDS, and Mortgage tests in this framework can be enabled by As an example, here is the `spark-submit` command with the TPCxBB parameters: ``` -$SPARK_HOME/bin/spark-submit --jars "rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.17-SNAPSHOT.jar,rapids-4-spark-tests_2.12-0.4.0-SNAPSHOT.jar" ./runtests.py --tpcxbb_format="csv" --tpcxbb_path="/path/to/tpcxbb/csv" +$SPARK_HOME/bin/spark-submit --jars "rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.18-SNAPSHOT.jar,rapids-4-spark-tests_2.12-0.4.0-SNAPSHOT.jar" ./runtests.py --tpcxbb_format="csv" --tpcxbb_path="/path/to/tpcxbb/csv" ``` Be aware that running these tests with read data requires at least an entire GPU, and preferable several GPUs/executors @@ -209,7 +209,7 @@ To run cudf_udf tests, need following configuration changes: As an example, here is the `spark-submit` command with the cudf_udf parameter: ``` -$SPARK_HOME/bin/spark-submit --jars "rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.17-SNAPSHOT.jar,rapids-4-spark-tests_2.12-0.4.0-SNAPSHOT.jar" --conf spark.rapids.memory.gpu.allocFraction=0.3 --conf spark.rapids.python.memory.gpu.allocFraction=0.3 --conf spark.rapids.python.concurrentPythonWorkers=2 --py-files "rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar" --conf spark.executorEnv.PYTHONPATH="rapids-4-spark_2.12-0.2.0-SNAPSHOT.jar" ./runtests.py --cudf_udf +$SPARK_HOME/bin/spark-submit --jars "rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.18-SNAPSHOT.jar,rapids-4-spark-tests_2.12-0.4.0-SNAPSHOT.jar" --conf spark.rapids.memory.gpu.allocFraction=0.3 --conf spark.rapids.python.memory.gpu.allocFraction=0.3 --conf spark.rapids.python.concurrentPythonWorkers=2 --py-files "rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar" --conf spark.executorEnv.PYTHONPATH="rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar" ./runtests.py --cudf_udf ``` ## Writing tests diff --git a/jenkins/Dockerfile-blossom.integration.centos7 b/jenkins/Dockerfile-blossom.integration.centos7 index 53b992870ff..2ccfc232af3 100644 --- 
a/jenkins/Dockerfile-blossom.integration.centos7 +++ b/jenkins/Dockerfile-blossom.integration.centos7 @@ -18,7 +18,7 @@ # # Arguments: # CUDA_VER=10.1 or 10.2 -# CUDF_VER=0.16 or 0.17-SNAPSHOT +# CUDF_VER=0.16, 0.17-SNAPSHOT, or 0.18-SNAPSHOT # URM_URL= ### ARG CUDA_VER=10.1 diff --git a/jenkins/printJarVersion.sh b/jenkins/printJarVersion.sh index bfd3262529b..666c63218cd 100755 --- a/jenkins/printJarVersion.sh +++ b/jenkins/printJarVersion.sh @@ -24,7 +24,7 @@ function print_ver(){ SERVER_ID=$5 # Collect snapshot dependency info only in Jenkins build - # In dev build, print 'SNAPSHOT' tag without time stamp, e.g.: cudf-0.17-SNAPSHOT.jar + # In dev build, print 'SNAPSHOT' tag without time stamp, e.g.: cudf-0.18-SNAPSHOT.jar if [[ "$VERSION" == *"-SNAPSHOT" && -n "$JENKINS_URL" ]]; then PREFIX=${VERSION%-SNAPSHOT} # List the latest SNAPSHOT jar file in the maven repo diff --git a/jenkins/version-def.sh b/jenkins/version-def.sh index 15f528eb9ed..9f60d4782bd 100755 --- a/jenkins/version-def.sh +++ b/jenkins/version-def.sh @@ -26,7 +26,7 @@ for VAR in $OVERWRITE_PARAMS;do done IFS=$PRE_IFS -CUDF_VER=${CUDF_VER:-"0.17-SNAPSHOT"} +CUDF_VER=${CUDF_VER:-"0.18-SNAPSHOT"} CUDA_CLASSIFIER=${CUDA_CLASSIFIER:-"cuda10-1"} PROJECT_VER=${PROJECT_VER:-"0.4.0-SNAPSHOT"} SPARK_VER=${SPARK_VER:-"3.0.0"} diff --git a/pom.xml b/pom.xml index 500615d6f32..531afdfbbae 100644 --- a/pom.xml +++ b/pom.xml @@ -143,7 +143,7 @@ 1.8 3.0.0 cuda10-1 - 0.17-SNAPSHOT + 0.18-SNAPSHOT 2.12 2.12.8 1.5.8 diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsConf.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsConf.scala index ea8b5d500cb..f0da76f6654 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsConf.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsConf.scala @@ -824,7 +824,7 @@ object RapidsConf { |On startup use: `--conf [conf key]=[conf value]`. 
For example: | |``` - |${SPARK_HOME}/bin/spark --jars 'rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.17-SNAPSHOT-cuda10-1.jar' \ + |${SPARK_HOME}/bin/spark --jars 'rapids-4-spark_2.12-0.4.0-SNAPSHOT.jar,cudf-0.18-SNAPSHOT-cuda10-1.jar' \ |--conf spark.plugins=com.nvidia.spark.SQLPlugin \ |--conf spark.rapids.sql.incompatibleOps.enabled=true |```