Skip to content

Commit

Permalink
Add maven compile/package plugin executions for Spark302 and Spark301 (
Browse files Browse the repository at this point in the history
…#3257)

* initial pom files

* Copy code from 3.0.1 shim to 3.1.1


* move dependencies inside profile to package properly

Signed-off-by: Thomas Graves <tgraves@nvidia.com>

* revert 311 changes

* remove release 311 profiles

* fix shim dependencies

* revert change for 3.1.1

* change name of integration tests jar

* change to use with-classifier profile

* move dependencies back to where they were

* fix copyright

* fix spacing

* Fix dependencies on dist pre-merge profile

* Fix integration test dependencies

* Add property to set 2 profiles, address review comments

* debug premerge failures print active profiles

* disable mvn_verify

* enable mvn debug

* revert premerge build script changes

* Update 302 to match changes to 301 shim

* Update missed 301 dep
  • Loading branch information
tgravescs authored Aug 23, 2021
1 parent cc3f6f5 commit 7c4c6a9
Show file tree
Hide file tree
Showing 26 changed files with 2,053 additions and 161 deletions.
208 changes: 144 additions & 64 deletions dist/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -29,40 +29,150 @@
<description>Creates the distribution package of the RAPIDS plugin for Apache Spark</description>
<version>21.10.0-SNAPSHOT</version>

<dependencies>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-sql_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shuffle_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-aggregator_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-udf_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<!-- required for conf generation script -->
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<!-- required for conf generation script -->
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<!-- Profile-driven dependency selection for the dist (distribution) module.
     Exactly one of the profiles below supplies the plugin's internal
     dependencies:
       * "default"         : plain build, no shim classifier
       * "with-classifier" : selected shim build, triggered by the buildver
                             property (e.g. mvn -Dbuildver=301)
       * "pre-merge"       : CI verification build
     NOTE(review): the nvidia dependency lists are intentionally duplicated
     across profiles rather than shared; keep them in sync when editing. -->
<profiles>
<!-- Active only when no other profile is triggered (i.e. buildver unset);
     depends on the unclassified project artifacts. -->
<profile>
<id>default</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-sql_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shuffle_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-udf_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-aggregator_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<!-- required for conf generation script -->
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<!-- required for conf generation script -->
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</profile>
<!-- Activated whenever the buildver property is set on the command line
     (any value). The matching releaseNNN profile in the parent pom sets
     ${spark.version.classifier} (e.g. "spark301"), which is used here to
     resolve the shim-specific variants of the internal artifacts. -->
<profile>
<id>with-classifier</id>
<activation>
<property><name>buildver</name></property>
</activation>
<dependencies>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-sql_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<classifier>${spark.version.classifier}</classifier>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shuffle_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<classifier>${spark.version.classifier}</classifier>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-udf_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<classifier>${spark.version.classifier}</classifier>
</dependency>
<dependency>
<!-- NOTE(review): unlike its siblings above this dependency carries no
     classifier; presumably the shims aggregator is published unclassified
     because it already bundles all shims. Confirm before adding one. -->
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-aggregator_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<!-- required for conf generation script -->
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<!-- required for conf generation script -->
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</profile>
<!-- CI profile: same dependency set as "default" (duplicated because
     pre-merge is not active by default and profiles do not inherit from
     each other), plus a verify-phase check that fails the build if any
     tracked file was modified during the build. -->
<profile>
<id>pre-merge</id>
<dependencies>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-sql_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shuffle_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-udf_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-aggregator_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<!-- required for conf generation script -->
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<!-- required for conf generation script -->
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<!-- Run at verify: list tracked files git reports as modified
     ("^ M" in porcelain output); exit 1 (failing the build) if any
     exist, e.g. regenerated files not checked in. -->
<execution>
<id>if_modified_files</id>
<phase>verify</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>bash</executable>
<commandlineArgs>-c 'export MODIFIED=$(git status --porcelain | grep "^ M"); [[ -z $MODIFIED ]] &amp;&amp; exit 0 || { echo -e "found modified files during mvn verify:\n$MODIFIED"; exit 1;}'</commandlineArgs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>


<build>
<plugins>
Expand Down Expand Up @@ -206,34 +316,4 @@
</plugins>
</build>

<profiles>
<profile>
<id>pre-merge</id>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<id>if_modified_files</id>
<phase>verify</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>bash</executable>
<commandlineArgs>-c 'export MODIFIED=$(git status --porcelain | grep "^ M"); [[ -z $MODIFIED ]] &amp;&amp; exit 0 || { echo -e "found modified files during mvn verify:\n$MODIFIED"; exit 1;}'</commandlineArgs>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
</profile>
</profiles>

</project>
58 changes: 51 additions & 7 deletions integration_tests/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -67,13 +67,6 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-tests_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-udf-examples_${scala.binary.version}</artifactId>
Expand All @@ -82,6 +75,57 @@
</dependency>
</dependencies>

<!-- Profile-driven selection of the rapids-4-spark-tests test-jar for the
     integration_tests module, mirroring the default / with-classifier split
     used by the dist module. -->
<profiles>
<!-- Plain build (buildver unset): depend on the unclassified test-jar. -->
<profile>
<id>default</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-tests_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>
</profile>
<!-- Shim build (buildver set): depend on the shim-specific test-jar and
     name this module's own artifacts with the shim classifier. -->
<profile>
<id>with-classifier</id>
<activation>
<property><name>buildver</name></property>
</activation>
<dependencies>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-tests_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!-- Classifier is the shim name with a literal "tests" suffix
     (e.g. "spark301tests"); this matches the classifier the jar
     plugin's test-jar execution assigns in the parent pom. -->
<classifier>${spark.version.classifier}tests</classifier>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Tag this module's jar with the shim classifier so per-shim
     builds do not overwrite each other's artifacts. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<classifier>${spark.version.classifier}</classifier>
</configuration>
</plugin>
<!-- Include the shim classifier in the assembly's final name for
     the same reason. -->
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<finalName>rapids-4-spark-integration-tests_${scala.binary.version}-${project.version}-${spark.version.classifier}</finalName>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>

<build>
<plugins>
<plugin>
Expand Down
33 changes: 33 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,34 @@
</modules>

<profiles>
<!-- Shim selection profiles: activated by an exact buildver value
     (mvn -Dbuildver=301 / -Dbuildver=302). Each one pins the Spark compile
     and test versions and sets ${spark.version.classifier}, which the
     "with-classifier" profiles in dist and integration_tests use to resolve
     shim-specific artifacts. Keep the classifier in the form
     "spark" + buildver so artifact names stay predictable. -->
<profile>
<id>release301</id>
<activation>
<property>
<name>buildver</name>
<value>301</value>
</property>
</activation>
<properties>
<spark.version.classifier>spark301</spark.version.classifier>
<spark.version>${spark301.version}</spark.version>
<spark.test.version>${spark301.version}</spark.test.version>
</properties>
</profile>
<profile>
<id>release302</id>
<activation>
<property>
<name>buildver</name>
<value>302</value>
</property>
</activation>
<properties>
<spark.version.classifier>spark302</spark.version.classifier>
<spark.version>${spark302.version}</spark.version>
<spark.test.version>${spark302.version}</spark.test.version>
</properties>
</profile>
<profile>
<id>udf-compiler</id>
<modules>
Expand Down Expand Up @@ -232,6 +260,7 @@
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<spark.version>${spark301.version}</spark.version>
<spark.version.classifier></spark.version.classifier>
<spark.test.version>${spark301.version}</spark.test.version>
<cuda.version>cuda11</cuda.version>
<cudf.version>21.10.0-SNAPSHOT</cudf.version>
Expand Down Expand Up @@ -592,9 +621,13 @@
<version>${maven.jar.plugin.version}</version>
<!-- NOTE(review): enclosing plugin element is outside this hunk; the
     ${maven.jar.plugin.version} reference on the preceding line suggests
     this configures maven-jar-plugin. Confirm against the full pom. -->
<executions>
<execution>
<!-- Explicit execution id; "default-test-jar" follows Maven's
     default-<goal> naming convention, presumably so child poms can
     reference or override this execution. TODO confirm. -->
<id>default-test-jar</id>
<goals>
<goal>test-jar</goal>
</goals>
<configuration>
<!-- Shim classifier plus literal "tests" suffix (e.g. "spark301tests")
     so per-shim test jars do not collide; integration_tests'
     with-classifier profile depends on exactly this classifier. -->
<classifier>${spark.version.classifier}tests</classifier>
</configuration>
</execution>
</executions>
</plugin>
Expand Down
Loading

0 comments on commit 7c4c6a9

Please sign in to comment.