Added in profile to enable shims for SNAPSHOT releases (NVIDIA#611)
* Added in profile to enable shims for SNAPSHOT releases

Signed-off-by: Robert (Bobby) Evans <[email protected]>

* Updated jenkins files to be explicit about snapshot-shims

* Addressed review comments
revans2 authored Aug 26, 2020
1 parent 7ac919b commit 1888adb
Showing 10 changed files with 65 additions and 52 deletions.
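The gist of the change: the SNAPSHOT shim modules (spark301, spark302, spark310) now sit behind a new snapshot-shims Maven profile that is active by default, so everyday developer builds are unchanged, while release builds opt out with Maven's `!profile` deactivation syntax. A minimal sketch, using only invocations that appear in the diffs below:

    # Nightly/CI build: keep the SNAPSHOT shims (named explicitly; see the nightly script below)
    mvn -U -B -Pinclude-databricks,snapshot-shims clean deploy
    # Release build: drop the SNAPSHOT shims
    mvn -U -B clean install -P 'source-javadoc,!snapshot-shims'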
2 changes: 1 addition & 1 deletion jenkins/Jenkinsfile.release
@@ -57,7 +57,7 @@ pipeline {
sh "mkdir -p ${HOME}/.zinc"
docker.image("$IMAGE_NAME").inside("--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \
-v ${HOME}/.zinc:${HOME}/.zinc:rw") {
sh "mvn -U -B clean install $MVN_MIRROR -P source-javadoc"
sh "mvn -U -B clean install $MVN_MIRROR -P 'source-javadoc,!snapshot-shims'"
}
}
}
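A note on the quoting: `!` triggers history expansion in interactive bash and is special in some other shells, so the profile list is wrapped in single quotes before it reaches mvn. Equivalent spellings, assuming a bash-like shell such as the one Jenkins sh steps invoke (in non-interactive scripts history expansion is off, so the quoting is defensive):

    mvn ... -P 'source-javadoc,!snapshot-shims'   # quote the whole profile list
    mvn ... -P source-javadoc,\!snapshot-shims    # or escape just the bang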
4 changes: 2 additions & 2 deletions jenkins/databricks/build.sh
@@ -41,7 +41,7 @@ rm -rf spark-rapids
mkdir spark-rapids
tar -zxvf $SPARKTGZ -C spark-rapids
cd spark-rapids
-mvn -B -Pdatabricks clean package -DskipTests || true
+mvn -B '-Pdatabricks,!snapshot-shims' clean package -DskipTests || true
M2DIR=/home/ubuntu/.m2/repository
CUDF_JAR=${M2DIR}/ai/rapids/cudf/${CUDF_VERSION}/cudf-${CUDF_VERSION}-${CUDA_VERSION}.jar

@@ -87,7 +87,7 @@ mvn -B install:install-file \
-Dversion=$SPARK_VERSION \
-Dpackaging=jar

-mvn -B -Pdatabricks clean package -DskipTests
+mvn -B '-Pdatabricks,!snapshot-shims' clean package -DskipTests

# Copy so we pick up new built jar and latesty CuDF jar. Note that the jar names has to be
# exactly what is in the staticly setup Databricks cluster we use.
2 changes: 1 addition & 1 deletion jenkins/databricks/deploy.sh
@@ -25,5 +25,5 @@ echo "Maven mirror is $MVN_URM_MIRROR"
SERVER_ID='snapshots'
SERVER_URL="$URM_URL-local"
DBJARFPATH=./shims/spark300db/target/rapids-4-spark-shims-spark300-databricks_$SCALA_VERSION-$DATABRICKS_VERSION.jar
-mvn -B deploy:deploy-file $MVN_URM_MIRROR -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
+mvn -B deploy:deploy-file $MVN_URM_MIRROR '-P!snapshot-shims' -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
-Dfile=$DBJARFPATH -DpomFile=shims/spark300db/pom.xml
4 changes: 2 additions & 2 deletions jenkins/deploy.sh
@@ -60,9 +60,9 @@ if [ "$SIGN_FILE" == true ]; then
SQL_ART_VER=`mvn exec:exec -q -pl $SQL_PL -Dexec.executable=echo -Dexec.args='${project.version}'`
JS_FPATH="${SQL_PL}/target/${SQL_ART_ID}-${SQL_ART_VER}"
SRC_DOC_JARS="-Dsources=${JS_FPATH}-sources.jar -Djavadoc=${JS_FPATH}-javadoc.jar"
DEPLOY_CMD="mvn -B -Pinclude-databricks gpg:sign-and-deploy-file -s jenkins/settings.xml -Dgpg.passphrase=$GPG_PASSPHRASE"
DEPLOY_CMD="mvn -B '-Pinclude-databricks,!snapshot-shims' gpg:sign-and-deploy-file -s jenkins/settings.xml -Dgpg.passphrase=$GPG_PASSPHRASE"
else
DEPLOY_CMD="mvn -B -Pinclude-databricks deploy:deploy-file -s jenkins/settings.xml"
DEPLOY_CMD="mvn -B '-Pinclude-databricks,!snapshot-shims' deploy:deploy-file -s jenkins/settings.xml"
fi

echo "Deploy CMD: $DEPLOY_CMD"
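One caveat on building the command into a variable, sketched below under the assumption that deploy.sh later expands $DEPLOY_CMD unquoted rather than through eval: quotes embedded in a shell variable are not re-parsed on expansion, so the single quotes around the profile list would reach mvn as literal characters.

    DEPLOY_CMD="mvn -B '-Pinclude-databricks,!snapshot-shims' deploy:deploy-file"
    $DEPLOY_CMD         # word-split only: mvn sees a literal '-P... argument, quotes included
    eval "$DEPLOY_CMD"  # re-parsed: mvn sees -Pinclude-databricks,!snapshot-shims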
8 changes: 4 additions & 4 deletions jenkins/spark-nightly-build.sh
@@ -19,11 +19,11 @@ set -ex

. jenkins/version-def.sh

-mvn -U -B -Pinclude-databricks clean deploy $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
+mvn -U -B -Pinclude-databricks,snapshot-shims clean deploy $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
# Run unit tests against other spark versions
-mvn -U -B -Pspark301tests test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
-mvn -U -B -Pspark302tests test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
-mvn -U -B -Pspark310tests test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
+mvn -U -B -Pspark301tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
+mvn -U -B -Pspark302tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
+mvn -U -B -Pspark310tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2

# Parse cudf and spark files from local mvn repo
jenkins/printJarVersion.sh "CUDFVersion" "${WORKSPACE}/.m2/ai/rapids/cudf/${CUDF_VER}" "cudf-${CUDF_VER}" "-${CUDA_CLASSIFIER}.jar"
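The nightly job names snapshot-shims explicitly even though the profile is active by default. That is deliberate: Maven deactivates an activeByDefault profile whenever another profile defined in the same POM is activated on the command line, so spelling snapshot-shims out keeps the SNAPSHOT shims in the build no matter which other profiles a job adds (this is what the "explicit about snapshot-shims" note in the commit message refers to). One way to double-check what ends up active, using the stock maven-help-plugin:

    mvn help:active-profiles -Pinclude-databricks,snapshot-shims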
6 changes: 3 additions & 3 deletions jenkins/spark-premerge-build.sh
@@ -37,10 +37,10 @@ export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"
tar zxf $SPARK_HOME.tgz -C $ARTF_ROOT && \
rm -f $SPARK_HOME.tgz

-mvn -U -B $MVN_URM_MIRROR -Pinclude-databricks clean verify -Dpytest.TEST_TAGS=''
+mvn -U -B $MVN_URM_MIRROR '-Pinclude-databricks,!snapshot-shims' clean verify -Dpytest.TEST_TAGS=''
# Run the unit tests for other Spark versions but dont run full python integration tests
-env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark301tests test -Dpytest.TEST_TAGS=''
-env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark310tests test -Dpytest.TEST_TAGS=''
+env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark301tests,snapshot-shims test -Dpytest.TEST_TAGS=''
+env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark310tests,snapshot-shims test -Dpytest.TEST_TAGS=''

# The jacoco coverage should have been collected, but because of how the shade plugin
# works and jacoco we need to clean some things up so jacoco will only report for the
21 changes: 13 additions & 8 deletions pom.xml
@@ -163,14 +163,17 @@
<test.include.tags></test.include.tags>
<rapids.shuffle.manager.override>false</rapids.shuffle.manager.override>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.sourceEncoding>UTF-8</project.reporting.sourceEncoding>
<pytest.TEST_TAGS>not qarun</pytest.TEST_TAGS>
<rat.consoleOutput>false</rat.consoleOutput>
<slf4j.version>1.7.30</slf4j.version>
<spark300.version>3.0.0</spark300.version>
<spark301.version>3.0.1-SNAPSHOT</spark301.version>
<spark302.version>3.0.2-SNAPSHOT</spark302.version>
<spark310.version>3.1.0-SNAPSHOT</spark310.version>
<project.reporting.sourceEncoding>UTF-8</project.reporting.sourceEncoding>
<pytest.TEST_TAGS>not qarun</pytest.TEST_TAGS>
<rat.consoleOutput>false</rat.consoleOutput>
<slf4j.version>1.7.30</slf4j.version>
<spark300.version>3.0.0</spark300.version>
<!--
If you update a dependendy version so it is no longer a SNAPSHOT
please update the snapshot-shims profile as well so it is accurate -->
<spark301.version>3.0.1-SNAPSHOT</spark301.version>
<spark302.version>3.0.2-SNAPSHOT</spark302.version>
<spark310.version>3.1.0-SNAPSHOT</spark310.version>
</properties>

<dependencyManagement>
@@ -506,6 +509,7 @@
<exclude>.pytest_cache/**</exclude>
<exclude>.github/pull_request_template.md</exclude>
<exclude>**/*.md</exclude>
<exclude>**/*.iml</exclude>
<exclude>NOTICE-binary</exclude>
<exclude>docs/dev/idea-code-style-settings.xml</exclude>
<exclude>**/.m2/**</exclude>
@@ -515,6 +519,7 @@
<exclude>*.jar</exclude>
<exclude>docs/demo/**/*.ipynb</exclude>
<exclude>docs/demo/**/*.zpln</exclude>
<exclude>**/src/main/resources/META-INF/services/*</exclude>
</excludes>
</configuration>
</plugin>
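Context for the two new RAT excludes: *.iml files are IntelliJ IDEA module files, and the META-INF/services exclusion replaces the per-module RAT configuration that is deleted from shims/pom.xml further down. Those services files are evidently how each shim registers its provider for java.util.ServiceLoader discovery (see the ShimLoader change at the end of this commit); they hold only a class name, so there is nowhere to put a license header. A hypothetical example of such a registration file (path and class names are illustrative, not copied from the repo):

    # shims/spark310/src/main/resources/META-INF/services/com.nvidia.spark.rapids.SparkShimServiceProvider
    com.nvidia.spark.rapids.shims.spark310.SparkShimServiceProvider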
44 changes: 26 additions & 18 deletions shims/aggregator/pom.xml
@@ -56,27 +56,35 @@
</dependency>
</dependencies>
</profile>
<profile>
<id>snapshot-shims</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-spark310_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-spark302_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-spark301_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
</dependencies>
</profile>
</profiles>

<dependencies>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-spark310_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-spark302_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-spark301_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.nvidia</groupId>
<artifactId>rapids-4-spark-shims-spark300_${scala.binary.version}</artifactId>
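With the shim dependencies moved into the default-on profile, deactivating it yields an aggregator that bundles only the spark300 shim (plus the databricks shim when that profile is enabled). A quick way to see the effect, assuming the required shim artifacts are already installed in the local repository (dependency:tree is the stock maven-dependency-plugin goal):

    mvn -pl shims/aggregator dependency:tree '-P!snapshot-shims'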
23 changes: 11 additions & 12 deletions shims/pom.xml
@@ -39,13 +39,21 @@
<module>spark300db</module>
</modules>
</profile>
<profile>
<id>snapshot-shims</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<modules>
<module>spark301</module>
<module>spark302</module>
<module>spark310</module>
</modules>
</profile>
</profiles>

<modules>
<module>spark300</module>
<module>spark301</module>
<module>spark302</module>
<module>spark310</module>
<module>aggregator</module>
</modules>
<dependencies>
@@ -68,15 +76,6 @@
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/src/main/resources/META-INF/services/*</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.scalastyle</groupId>
<artifactId>scalastyle-maven-plugin</artifactId>
3 changes: 2 additions & 1 deletion sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala
@@ -38,7 +38,8 @@ object ShimLoader extends Logging {
logInfo(s"Found shims: $sparkShimLoaders")
private val loader = sparkShimLoaders.headOption match {
case Some(loader) => loader
-    case None => throw new IllegalArgumentException("Could not find Spark Shim Loader")
+    case None =>
+      throw new IllegalArgumentException(s"Could not find Spark Shim Loader for $sparkVersion")
}
private var sparkShims: SparkShims = null

