Bump up pre-merge OS from Ubuntu 16 to Ubuntu 18 [skip ci] (NVIDIA#2033)
* bump up pre-merge OS and CUDA version

* add Ubuntu version as a build arg

* set DEBIAN_FRONTEND=noninteractive for tzdata (see the sketch below)

* force JDK 8 for testing

Signed-off-by: Peixin Li <pxli@nyu.edu>
pxLi authored Mar 29, 2021
1 parent 8e6762d commit c1daf1d
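
For context on the "noninteractive for tzdata" item: on Ubuntu 18.04, installing tzdata in a docker build can stall because its debconf configuration step prompts for a timezone. A minimal sketch of the workaround the Dockerfile adopts (assumed behavior; the package list is illustrative):

# Without the override, the tzdata configuration prompt can block a
# non-interactive image build while waiting for a timezone selection.
# DEBIAN_FRONTEND=noninteractive tells debconf to skip all prompts.
DEBIAN_FRONTEND=noninteractive apt-get install -y tzdata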
Showing 5 changed files with 31 additions and 28 deletions.
4 changes: 2 additions & 2 deletions jenkins/Dockerfile-blossom.integration.centos7

@@ -17,8 +17,8 @@
 ###
 #
 # Arguments:
-#   CUDA_VER=10.1 or 10.2
-#   CUDF_VER=0.18 or 0.19-SNAPSHOT
+#   CUDA_VER=10.1, 10.2 or 11.0
+#   CUDF_VER=0.18 or 0.19
 #   URM_URL=<maven repo url>
 ###
 ARG CUDA_VER=10.1
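
As a usage sketch, a hypothetical local build of the integration image against the newly supported CUDA 11.0 (the URM_URL value and image tag are placeholders):

docker build -f jenkins/Dockerfile-blossom.integration.centos7 \
    --build-arg CUDA_VER=11.0 --build-arg CUDF_VER=0.19 \
    --build-arg URM_URL=https://urm.example.com/artifactory/sw-spark-maven \
    -t rapids-it-centos7:cuda11.0 .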
14 changes: 8 additions & 6 deletions jenkins/Dockerfile-blossom.ubuntu16 → jenkins/Dockerfile-blossom.ubuntu

@@ -18,22 +18,24 @@
 #
 # Build the image for rapids-plugin development environment
 #
-# Arguments: CUDA_VER=10.1 or 10.2
-#
+# Arguments:
+#   CUDA_VER=10.1, 10.2 or 11.0
+#   UBUNTU_VER=18.04 or 20.04
 ###
 
-ARG CUDA_VER=10.1
-
-FROM nvidia/cuda:${CUDA_VER}-runtime-ubuntu16.04
+ARG CUDA_VER=11.0
+ARG UBUNTU_VER=18.04
+FROM nvidia/cuda:${CUDA_VER}-runtime-ubuntu${UBUNTU_VER}
 
 #Install java-8, maven, docker image
 RUN apt-get update -y && \
     apt-get install -y software-properties-common
 RUN add-apt-repository ppa:deadsnakes/ppa && \
     apt-get update -y && \
-    apt-get install -y maven \
+    DEBIAN_FRONTEND="noninteractive" apt-get install -y maven \
     openjdk-8-jdk python3.8 python3.8-distutils python3-setuptools tzdata git
 RUN python3.8 -m easy_install pip
+RUN update-java-alternatives --set /usr/lib/jvm/java-1.8.0-openjdk-amd64
 
 RUN ln -s /usr/bin/python3.8 /usr/bin/python
 RUN python -m pip install pytest sre_yield requests pandas pyarrow findspark pytest-xdist
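
The new update-java-alternatives line is what the "force JDK 8 for testing" item refers to: on Ubuntu 18.04 the default JDK is newer than 8 (installing maven pulls in a recent default JRE), so the image pins java/javac back to OpenJDK 8. A quick sanity check against a locally built image (build args and tag are hypothetical):

docker build -f jenkins/Dockerfile-blossom.ubuntu \
    --build-arg CUDA_VER=11.0 --build-arg UBUNTU_VER=18.04 \
    -t plugin-dev:ubuntu18-cuda11.0 .
docker run --rm plugin-dev:ubuntu18-cuda11.0 java -version
# expected to report: openjdk version "1.8.0_..."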
11 changes: 5 additions & 6 deletions jenkins/Jenkinsfile-blossom.premerge

@@ -29,7 +29,8 @@ def pluginPremerge
 
 def githubHelper // blossom github helper
 def TEMP_IMAGE_BUILD = true
-def PREMERGE_DOCKERFILE = 'jenkins/Dockerfile-blossom.ubuntu16'
+def CUDA_NAME = 'cuda11.0' // hardcode cuda version for docker build part
+def PREMERGE_DOCKERFILE = 'jenkins/Dockerfile-blossom.ubuntu'
 def IMAGE_PREMERGE // temp image for premerge test
 def PREMERGE_TAG
 def skipped = false
@@ -60,6 +61,7 @@ pipeline {
         URM_URL = "https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"
         PVC = credentials("pvc")
         CUSTOM_WORKSPACE = "/home/jenkins/agent/workspace/${BUILD_TAG}"
+        CUDA_CLASSIFIER = 'cuda11'
     }
 
     stages {
@@ -116,9 +118,6 @@ pipeline {
             )
 
             container('docker-build') {
-                def CUDA_NAME = sh(returnStdout: true,
-                    script: '. jenkins/version-def.sh>&2 && echo -n $CUDA_CLASSIFIER | sed "s/-/./g"')
-
                 // check if pre-merge dockerfile modified
                 def dockerfileModified = sh(returnStdout: true,
                     script: 'BASE=$(git --no-pager log --oneline -1 | awk \'{ print $NF }\'); ' +
@@ -129,15 +128,15 @@
                 }
 
                 if (TEMP_IMAGE_BUILD) {
-                    IMAGE_TAG = "dev-ubuntu16-${CUDA_NAME}"
+                    IMAGE_TAG = "dev-ubuntu18-${CUDA_NAME}"
                     PREMERGE_TAG = "${IMAGE_TAG}-${BUILD_TAG}"
                    IMAGE_PREMERGE = "${ARTIFACTORY_NAME}/sw-spark-docker-local/plugin:${PREMERGE_TAG}"
                     def CUDA_VER = "$CUDA_NAME" - "cuda"
                     docker.build(IMAGE_PREMERGE, "-f ${PREMERGE_DOCKERFILE} --build-arg CUDA_VER=$CUDA_VER -t $IMAGE_PREMERGE .")
                     uploadDocker(IMAGE_PREMERGE)
                 } else {
                     // if no pre-merge dockerfile change, use nightly image
-                    IMAGE_PREMERGE = "$ARTIFACTORY_NAME/sw-spark-docker-local/plugin:dev-ubuntu16-$CUDA_NAME-blossom-dev"
+                    IMAGE_PREMERGE = "$ARTIFACTORY_NAME/sw-spark-docker-local/plugin:dev-ubuntu18-$CUDA_NAME-blossom-dev"
                 }
 
 
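
Two details in this hunk are easy to miss. First, CUDA_NAME used to be derived at runtime from CUDA_CLASSIFIER by turning dashes into dots; it is now hardcoded to 'cuda11.0' for the docker-build stage. Second, the Groovy expression "$CUDA_NAME" - "cuda" removes the first occurrence of "cuda", leaving the bare version for --build-arg CUDA_VER. A shell sketch of both transformations (example values assumed):

echo -n "cuda11-0" | sed "s/-/./g"    # old derivation: cuda11-0 -> cuda11.0
echo -n "cuda11.0" | sed "s/^cuda//"  # what Groovy "$CUDA_NAME" - "cuda" yields: 11.0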
15 changes: 8 additions & 7 deletions jenkins/spark-nightly-build.sh

@@ -21,14 +21,15 @@ set -ex
 
 ## export 'M2DIR' so that shims can get the correct cudf/spark dependency info
 export M2DIR="$WORKSPACE/.m2"
-mvn -U -B -Pinclude-databricks,snapshot-shims clean deploy $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dpytest.TEST_TAGS='' -Dpytest.TEST_TYPE="nightly"
+mvn -U -B -Pinclude-databricks,snapshot-shims clean deploy $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR \
+    -Dpytest.TEST_TAGS='' -Dpytest.TEST_TYPE="nightly" -Dcuda.version=$CUDA_CLASSIFIER
 # Run unit tests against other spark versions
-mvn -U -B -Pspark301tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR
-mvn -U -B -Pspark302tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR
-mvn -U -B -Pspark303tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR
-mvn -U -B -Pspark311tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR
-mvn -U -B -Pspark312tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR
-mvn -U -B -Pspark320tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR
+mvn -U -B -Pspark301tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dcuda.version=$CUDA_CLASSIFIER
+mvn -U -B -Pspark302tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dcuda.version=$CUDA_CLASSIFIER
+mvn -U -B -Pspark303tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dcuda.version=$CUDA_CLASSIFIER
+mvn -U -B -Pspark311tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dcuda.version=$CUDA_CLASSIFIER
+mvn -U -B -Pspark312tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dcuda.version=$CUDA_CLASSIFIER
+mvn -U -B -Pspark320tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$M2DIR -Dcuda.version=$CUDA_CLASSIFIER
 
 # Parse cudf and spark files from local mvn repo
 jenkins/printJarVersion.sh "CUDFVersion" "$M2DIR/ai/rapids/cudf/${CUDF_VER}" "cudf-${CUDF_VER}" "-${CUDA_CLASSIFIER}.jar" $SERVER_ID
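
Presumably -Dcuda.version feeds the cudf dependency classifier, so with CUDA_CLASSIFIER=cuda11 the build resolves cudf-${CUDF_VER}-cuda11.jar, which is what the printJarVersion.sh line above checks for. A hypothetical manual run outside Jenkins (CUDA_CLASSIFIER normally comes from the pipeline environment block; other variables such as MVN_URM_MIRROR, CUDF_VER and SERVER_ID must also be set):

export WORKSPACE=$PWD            # normally provided by Jenkins
export CUDA_CLASSIFIER=cuda11    # matches the Jenkinsfile environment block
jenkins/spark-nightly-build.sh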
15 changes: 8 additions & 7 deletions jenkins/spark-premerge-build.sh

@@ -37,15 +37,16 @@ export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"
 tar zxf $SPARK_HOME.tgz -C $ARTF_ROOT && \
     rm -f $SPARK_HOME.tgz
 
-mvn -U -B $MVN_URM_MIRROR '-P!snapshot-shims,pre-merge' clean verify -Dpytest.TEST_TAGS='' -Dpytest.TEST_TYPE="pre-commit" -Dpytest.TEST_PARALLEL=4
+mvn -U -B $MVN_URM_MIRROR '-P!snapshot-shims,pre-merge' clean verify -Dpytest.TEST_TAGS='' \
+    -Dpytest.TEST_TYPE="pre-commit" -Dpytest.TEST_PARALLEL=4 -Dcuda.version=$CUDA_CLASSIFIER
 # Run the unit tests for other Spark versions but dont run full python integration tests
 # NOT ALL TESTS NEEDED FOR PREMERGE
-#env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark301tests,snapshot-shims test -Dpytest.TEST_TAGS=''
-env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark302tests,snapshot-shims test -Dpytest.TEST_TAGS=''
-env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark303tests,snapshot-shims test -Dpytest.TEST_TAGS=''
-env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark311tests,snapshot-shims test -Dpytest.TEST_TAGS=''
-env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark312tests,snapshot-shims test -Dpytest.TEST_TAGS=''
-env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark320tests,snapshot-shims test -Dpytest.TEST_TAGS=''
+#env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark301tests,snapshot-shims test -Dpytest.TEST_TAGS='' -Dcuda.version=$CUDA_CLASSIFIER
+env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark302tests,snapshot-shims test -Dpytest.TEST_TAGS='' -Dcuda.version=$CUDA_CLASSIFIER
+env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark303tests,snapshot-shims test -Dpytest.TEST_TAGS='' -Dcuda.version=$CUDA_CLASSIFIER
+env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark311tests,snapshot-shims test -Dpytest.TEST_TAGS='' -Dcuda.version=$CUDA_CLASSIFIER
+env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark312tests,snapshot-shims test -Dpytest.TEST_TAGS='' -Dcuda.version=$CUDA_CLASSIFIER
+env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark320tests,snapshot-shims test -Dpytest.TEST_TAGS='' -Dcuda.version=$CUDA_CLASSIFIER
 
 # The jacoco coverage should have been collected, but because of how the shade plugin
 # works and jacoco we need to clean some things up so jacoco will only report for the
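
A side note on the pattern in these lines: env -u SPARK_HOME runs Maven with SPARK_HOME removed from the environment, so each spark3xx test profile resolves its own Spark artifacts instead of the distribution unpacked earlier in the script. A tiny sketch of the assumed semantics of env -u:

export SPARK_HOME=/opt/spark    # illustrative path
env -u SPARK_HOME sh -c 'echo "SPARK_HOME=${SPARK_HOME:-unset}"'   # prints: SPARK_HOME=unset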
