diff --git a/jenkins/Jenkinsfile.release b/jenkins/Jenkinsfile.release
index be05c396104..30fee881363 100644
--- a/jenkins/Jenkinsfile.release
+++ b/jenkins/Jenkinsfile.release
@@ -57,7 +57,7 @@ pipeline {
sh "mkdir -p ${HOME}/.zinc"
docker.image("$IMAGE_NAME").inside("--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \
-v ${HOME}/.zinc:${HOME}/.zinc:rw") {
- sh "mvn -U -B clean install $MVN_MIRROR -P source-javadoc"
+ sh "mvn -U -B clean install $MVN_MIRROR -P 'source-javadoc,!snapshot-shims'"
}
}
}
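
A note on the -P 'source-javadoc,!snapshot-shims' style used throughout this change: prefixing a profile id with `!` asks Maven to explicitly deactivate that profile, and the single quotes keep an interactive shell from treating `!` as history expansion (non-interactive CI scripts run with history expansion off, so the quoting is defensive). A minimal sketch of the equivalent spellings:

    # activate one profile and deactivate another in a single -P flag
    mvn -B clean install -P 'source-javadoc,!snapshot-shims'
    # '-' is an alternate deactivation prefix that needs no quoting
    mvn -B clean install -P source-javadoc,-snapshot-shims
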
diff --git a/jenkins/databricks/build.sh b/jenkins/databricks/build.sh
index 27d44cbe295..9f8a282483e 100755
--- a/jenkins/databricks/build.sh
+++ b/jenkins/databricks/build.sh
@@ -41,7 +41,7 @@ rm -rf spark-rapids
mkdir spark-rapids
tar -zxvf $SPARKTGZ -C spark-rapids
cd spark-rapids
-mvn -B -Pdatabricks clean package -DskipTests || true
+mvn -B '-Pdatabricks,!snapshot-shims' clean package -DskipTests || true
M2DIR=/home/ubuntu/.m2/repository
CUDF_JAR=${M2DIR}/ai/rapids/cudf/${CUDF_VERSION}/cudf-${CUDF_VERSION}-${CUDA_VERSION}.jar
@@ -87,7 +87,7 @@ mvn -B install:install-file \
-Dversion=$SPARK_VERSION \
-Dpackaging=jar
-mvn -B -Pdatabricks clean package -DskipTests
+mvn -B '-Pdatabricks,!snapshot-shims' clean package -DskipTests
# Copy so we pick up the newly built jar and the latest cuDF jar. Note that the jar names have to
# be exactly what is in the statically set up Databricks cluster we use.
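
Because the first mvn invocation above is allowed to fail (`|| true`) until the Spark jars are installed into the local repository, the build log alone does not make the effective profile set obvious. The stock maven-help-plugin can confirm it; shown here as a sanity check, not part of the script:

    # print the profiles Maven resolves as active for each module
    mvn help:active-profiles '-Pdatabricks,!snapshot-shims'
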
diff --git a/jenkins/databricks/deploy.sh b/jenkins/databricks/deploy.sh
index ed71b949a31..f64757a91d6 100755
--- a/jenkins/databricks/deploy.sh
+++ b/jenkins/databricks/deploy.sh
@@ -25,5 +25,5 @@ echo "Maven mirror is $MVN_URM_MIRROR"
SERVER_ID='snapshots'
SERVER_URL="$URM_URL-local"
DBJARFPATH=./shims/spark300db/target/rapids-4-spark-shims-spark300-databricks_$SCALA_VERSION-$DATABRICKS_VERSION.jar
-mvn -B deploy:deploy-file $MVN_URM_MIRROR -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
+mvn -B deploy:deploy-file $MVN_URM_MIRROR '-P!snapshot-shims' -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
-Dfile=$DBJARFPATH -DpomFile=shims/spark300db/pom.xml
diff --git a/jenkins/deploy.sh b/jenkins/deploy.sh
index 8351fd2fbdd..4ac0777e0fe 100755
--- a/jenkins/deploy.sh
+++ b/jenkins/deploy.sh
@@ -60,9 +60,9 @@ if [ "$SIGN_FILE" == true ]; then
SQL_ART_VER=`mvn exec:exec -q -pl $SQL_PL -Dexec.executable=echo -Dexec.args='${project.version}'`
JS_FPATH="${SQL_PL}/target/${SQL_ART_ID}-${SQL_ART_VER}"
SRC_DOC_JARS="-Dsources=${JS_FPATH}-sources.jar -Djavadoc=${JS_FPATH}-javadoc.jar"
- DEPLOY_CMD="mvn -B -Pinclude-databricks gpg:sign-and-deploy-file -s jenkins/settings.xml -Dgpg.passphrase=$GPG_PASSPHRASE"
+ DEPLOY_CMD="mvn -B '-Pinclude-databricks,!snapshot-shims' gpg:sign-and-deploy-file -s jenkins/settings.xml -Dgpg.passphrase=$GPG_PASSPHRASE"
else
- DEPLOY_CMD="mvn -B -Pinclude-databricks deploy:deploy-file -s jenkins/settings.xml"
+ DEPLOY_CMD="mvn -B '-Pinclude-databricks,!snapshot-shims' deploy:deploy-file -s jenkins/settings.xml"
fi
echo "Deploy CMD: $DEPLOY_CMD"
diff --git a/jenkins/spark-nightly-build.sh b/jenkins/spark-nightly-build.sh
index 469efb79864..db3243454ed 100755
--- a/jenkins/spark-nightly-build.sh
+++ b/jenkins/spark-nightly-build.sh
@@ -19,11 +19,11 @@ set -ex
. jenkins/version-def.sh
-mvn -U -B -Pinclude-databricks clean deploy $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
+mvn -U -B -Pinclude-databricks,snapshot-shims clean deploy $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
# Run unit tests against other spark versions
-mvn -U -B -Pspark301tests test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
-mvn -U -B -Pspark302tests test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
-mvn -U -B -Pspark310tests test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
+mvn -U -B -Pspark301tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
+mvn -U -B -Pspark302tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
+mvn -U -B -Pspark310tests,snapshot-shims test $MVN_URM_MIRROR -Dmaven.repo.local=$WORKSPACE/.m2
# Parse cudf and spark files from local mvn repo
jenkins/printJarVersion.sh "CUDFVersion" "${WORKSPACE}/.m2/ai/rapids/cudf/${CUDF_VER}" "cudf-${CUDF_VER}" "-${CUDA_CLASSIFIER}.jar"
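
Listing snapshot-shims explicitly here is not redundant even though the profile is activeByDefault: under Maven's profile rules, a default-active profile is switched off as soon as another profile in the same POM is activated on the command line, so -Pinclude-databricks alone would silently drop the snapshot shim modules from the nightly deploy. Sketch of the behavior:

    mvn validate                                      # snapshot-shims active by default
    mvn validate -Pinclude-databricks                 # snapshot-shims deactivated
    mvn validate -Pinclude-databricks,snapshot-shims  # both active again
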
diff --git a/jenkins/spark-premerge-build.sh b/jenkins/spark-premerge-build.sh
index 74558df7db3..f26b82cb9a6 100755
--- a/jenkins/spark-premerge-build.sh
+++ b/jenkins/spark-premerge-build.sh
@@ -37,10 +37,10 @@ export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"
tar zxf $SPARK_HOME.tgz -C $ARTF_ROOT && \
rm -f $SPARK_HOME.tgz
-mvn -U -B $MVN_URM_MIRROR -Pinclude-databricks clean verify -Dpytest.TEST_TAGS=''
+mvn -U -B $MVN_URM_MIRROR '-Pinclude-databricks,!snapshot-shims' clean verify -Dpytest.TEST_TAGS=''
# Run the unit tests for other Spark versions but don't run full Python integration tests
-env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark301tests test -Dpytest.TEST_TAGS=''
-env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark310tests test -Dpytest.TEST_TAGS=''
+env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark301tests,snapshot-shims test -Dpytest.TEST_TAGS=''
+env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark310tests,snapshot-shims test -Dpytest.TEST_TAGS=''
# The jacoco coverage should have been collected, but because of how the shade plugin
# works and jacoco we need to clean some things up so jacoco will only report for the
diff --git a/pom.xml b/pom.xml
index 3c0f76825f9..ca3ca033fad 100644
--- a/pom.xml
+++ b/pom.xml
@@ -163,14 +163,17 @@
         <skipTests>false</skipTests>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-        <pytest.TEST_TAGS>not qarun</pytest.TEST_TAGS>
-        <rat.consoleOutput>false</rat.consoleOutput>
-        <slf4j.version>1.7.30</slf4j.version>
-        <spark300.version>3.0.0</spark300.version>
-        <spark301.version>3.0.1-SNAPSHOT</spark301.version>
-        <spark302.version>3.0.2-SNAPSHOT</spark302.version>
-        <spark310.version>3.1.0-SNAPSHOT</spark310.version>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+        <pytest.TEST_TAGS>not qarun</pytest.TEST_TAGS>
+        <rat.consoleOutput>false</rat.consoleOutput>
+        <slf4j.version>1.7.30</slf4j.version>
+        <spark300.version>3.0.0</spark300.version>
+        <!-- snapshot shim versions, built only when the snapshot-shims profile is active -->
+        <spark301.version>3.0.1-SNAPSHOT</spark301.version>
+        <spark302.version>3.0.2-SNAPSHOT</spark302.version>
+        <spark310.version>3.1.0-SNAPSHOT</spark310.version>
@@ -506,6 +509,7 @@
                         <exclude>.pytest_cache/**</exclude>
                         <exclude>.github/pull_request_template.md</exclude>
                         <exclude>**/*.md</exclude>
+                        <exclude>**/*.iml</exclude>
                         <exclude>NOTICE-binary</exclude>
                         <exclude>docs/dev/idea-code-style-settings.xml</exclude>
                         <exclude>**/.m2/**</exclude>
@@ -515,6 +519,7 @@
                         <exclude>*.jar</exclude>
                         <exclude>docs/demo/**/*.ipynb</exclude>
                         <exclude>docs/demo/**/*.zpln</exclude>
+                        <exclude>**/src/main/resources/META-INF/services/*</exclude>
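
With the RAT excludes consolidated in the root pom (and the ServiceLoader registration files now exempt project-wide), the license-header audit can be run on its own to verify the new patterns; the fully qualified form avoids relying on a plugin-prefix mapping:

    # run only the Apache RAT license/header check that these excludes feed
    mvn org.apache.rat:apache-rat-plugin:check
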
diff --git a/shims/aggregator/pom.xml b/shims/aggregator/pom.xml
index 75dce894cd4..07a150a82c5 100644
--- a/shims/aggregator/pom.xml
+++ b/shims/aggregator/pom.xml
@@ -56,27 +56,35 @@
+        <profile>
+            <id>snapshot-shims</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>com.nvidia</groupId>
+                    <artifactId>rapids-4-spark-shims-spark310_${scala.binary.version}</artifactId>
+                    <version>${project.version}</version>
+                    <scope>compile</scope>
+                </dependency>
+                <dependency>
+                    <groupId>com.nvidia</groupId>
+                    <artifactId>rapids-4-spark-shims-spark302_${scala.binary.version}</artifactId>
+                    <version>${project.version}</version>
+                    <scope>compile</scope>
+                </dependency>
+                <dependency>
+                    <groupId>com.nvidia</groupId>
+                    <artifactId>rapids-4-spark-shims-spark301_${scala.binary.version}</artifactId>
+                    <version>${project.version}</version>
+                    <scope>compile</scope>
+                </dependency>
+            </dependencies>
+        </profile>
-        <dependency>
-            <groupId>com.nvidia</groupId>
-            <artifactId>rapids-4-spark-shims-spark310_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.nvidia</groupId>
-            <artifactId>rapids-4-spark-shims-spark302_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.nvidia</groupId>
-            <artifactId>rapids-4-spark-shims-spark301_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <scope>compile</scope>
-        </dependency>
         <dependency>
             <groupId>com.nvidia</groupId>
             <artifactId>rapids-4-spark-shims-spark300_${scala.binary.version}</artifactId>
diff --git a/shims/pom.xml b/shims/pom.xml
index d5420c8509a..5946987def5 100644
--- a/shims/pom.xml
+++ b/shims/pom.xml
@@ -39,13 +39,21 @@
                 <module>spark300db</module>
             </modules>
         </profile>
+        <profile>
+            <id>snapshot-shims</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <modules>
+                <module>spark301</module>
+                <module>spark302</module>
+                <module>spark310</module>
+            </modules>
+        </profile>
     </profiles>

     <modules>
         <module>spark300</module>
-        <module>spark301</module>
-        <module>spark302</module>
-        <module>spark310</module>
         <module>aggregator</module>
@@ -68,15 +76,6 @@
             <plugin>
                 <groupId>net.alchim31.maven</groupId>
                 <artifactId>scala-maven-plugin</artifactId>
             </plugin>
-            <plugin>
-                <groupId>org.apache.rat</groupId>
-                <artifactId>apache-rat-plugin</artifactId>
-                <configuration>
-                    <excludes>
-                        <exclude>**/src/main/resources/META-INF/services/*</exclude>
-                    </excludes>
-                </configuration>
-            </plugin>
             <plugin>
                 <groupId>org.scalastyle</groupId>
                 <artifactId>scalastyle-maven-plugin</artifactId>
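
Since the snapshot shim modules now sit behind a default-active profile, negating that one profile is enough to shrink the reactor to the released shim. A quick way to confirm, from the repository root (validate is just a cheap phase for printing the reactor summary):

    mvn validate                       # reactor includes the spark301/spark302/spark310 shims
    mvn validate -P '!snapshot-shims'  # reactor builds only the spark300 shim and aggregator
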
diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala
index 878acb2a559..17d8a4e22b2 100644
--- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala
+++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala
@@ -38,7 +38,8 @@ object ShimLoader extends Logging {
logInfo(s"Found shims: $sparkShimLoaders")
private val loader = sparkShimLoaders.headOption match {
case Some(loader) => loader
- case None => throw new IllegalArgumentException("Could not find Spark Shim Loader")
+ case None =>
+ throw new IllegalArgumentException(s"Could not find Spark Shim Loader for $sparkVersion")
}
private var sparkShims: SparkShims = null
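
The sparkShimLoaders iterated above are discovered through Java's ServiceLoader mechanism, which is why the META-INF/services/* registration files exempted from RAT earlier are load-bearing: each shim jar ships one, and a jar built without the shim matching the running Spark version leaves the loader list empty, triggering the error whose message now names that version. A quick way to inspect which providers a built jar registers (the jar path here is illustrative, not taken from this diff):

    # list the ServiceLoader registrations bundled into an aggregated jar
    unzip -l dist/target/rapids-4-spark_2.12-*.jar 'META-INF/services/*'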