MVN_OPT to last, as it is empty in most cases [databricks] (#5534)
* Move MVN_OPT to last, as it is empty in most cases

Signed-off-by: Tim Liu <[email protected]>

* Do not pass the Databricks build profiles, as they are not used

* Remove MVN_OPT from the mvn command for now; a follow-up PR will handle it
NvTimLiu authored May 19, 2022
commit 4f29451 (1 parent: ca54899)
Showing 4 changed files with 8 additions and 6 deletions.
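
Background for the change (not part of the diff itself): a minimal shell sketch of why a usually-empty value is fragile as a middle positional argument. When an empty, unquoted variable is expanded, it vanishes from the argument list and every later argument shifts one position to the left; keeping the optional value last leaves the mandatory positions stable. The values below are illustrative only.

# Hypothetical caller; MVN_OPT is empty in most cases.
MVN_OPT=""
TGZ=./spark-rapids-ci.tgz
SPARK_VER=3.1.2
INSTALL_VER=3.1.2

# Old order: the empty, unquoted $MVN_OPT collapses, so INSTALL_VER
# arrives in the callee as $3 instead of the expected $4.
./build.sh "$TGZ" "$SPARK_VER" $MVN_OPT "$INSTALL_VER"

# New order: the optional value trails, so $1..$3 are always the same
# and an empty trailing argument can simply be omitted.
./build.sh "$TGZ" "$SPARK_VER" "$INSTALL_VER" $MVN_OPT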
jenkins/Jenkinsfile-blossom.premerge: 1 addition & 1 deletion
@@ -574,7 +574,7 @@ void databricksBuild() {
withCredentials([file(credentialsId: 'SPARK_DATABRICKS_PRIVKEY', variable: 'DATABRICKS_PRIVKEY')]) {
def BUILD_PARAMS = " -w $DATABRICKS_HOST -t $DATABRICKS_TOKEN -c $CLUSTER_ID -z ./spark-rapids-ci.tgz" +
" -p $DATABRICKS_PRIVKEY -l ./jenkins/databricks/build.sh -d /home/ubuntu/build.sh" +
" -b $BUILD_PROFILES -v $BASE_SPARK_VERSION -i $BASE_SPARK_VERSION_TO_INSTALL_DATABRICKS_JARS"
" -v $BASE_SPARK_VERSION -i $BASE_SPARK_VERSION_TO_INSTALL_DATABRICKS_JARS"
sh "python3 ./jenkins/databricks/run-build.py $BUILD_PARAMS"
}
}
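
With the -b flag gone, the premerge stage ends up calling run-build.py roughly as follows (host, token, cluster id and key path are placeholders; the Databricks build profiles are no longer passed down):

python3 ./jenkins/databricks/run-build.py \
    -w <DATABRICKS_HOST> -t <DATABRICKS_TOKEN> -c <CLUSTER_ID> \
    -z ./spark-rapids-ci.tgz -p <DATABRICKS_PRIVKEY> \
    -l ./jenkins/databricks/build.sh -d /home/ubuntu/build.sh \
    -v $BASE_SPARK_VERSION -i $BASE_SPARK_VERSION_TO_INSTALL_DATABRICKS_JARS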
jenkins/databricks/build.sh: 5 additions & 3 deletions
@@ -20,8 +20,10 @@ set -ex
SPARKSRCTGZ=$1
# version of Apache Spark we are building against
BASE_SPARK_VERSION=$2
-MVN_OPT=$3
-BASE_SPARK_VERSION_TO_INSTALL_DATABRICKS_JARS=$4
+BASE_SPARK_VERSION_TO_INSTALL_DATABRICKS_JARS=$3
+
+# Move MVN_OPT to last, as it is empty in most cases
+MVN_OPT=$4
MVN_OPT=${MVN_OPT:-''}
BASE_SPARK_VERSION=${BASE_SPARK_VERSION:-'3.1.2'}
BUILDVER=$(echo ${BASE_SPARK_VERSION} | sed 's/\.//g')db
@@ -442,7 +444,7 @@ mvn -B install:install-file \
-Dversion=$SPARK_VERSION_TO_INSTALL_DATABRICKS_JARS \
-Dpackaging=jar

-mvn -B -Ddatabricks -Dbuildver=$BUILDVER clean package -DskipTests $MVN_OPT
+mvn -B -Ddatabricks -Dbuildver=$BUILDVER clean package -DskipTests

cd /home/ubuntu
tar -zcf spark-rapids-built.tgz spark-rapids
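
For reference, a hedged sketch of calling build.sh directly under the new argument order (paths and versions are illustrative): the install-jars version moves up to $3, MVN_OPT trails as an optional $4 defaulting to an empty string, and, per the commit message, $MVN_OPT is dropped from the final mvn invocation for now and will be wired back in a later PR.

# Common case: MVN_OPT is empty, so only three arguments are needed.
bash jenkins/databricks/build.sh ./spark-rapids-ci.tgz 3.1.2 3.1.2

# Occasional case: extra Maven options ride along as the trailing fourth
# argument (currently parsed into MVN_OPT but not yet applied to mvn).
bash jenkins/databricks/build.sh ./spark-rapids-ci.tgz 3.1.2 3.1.2 "-Dmaven.repo.local=/tmp/m2"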
jenkins/databricks/params.py: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
base_spark_pom_version = '3.1.1'
base_spark_version_to_install_databricks_jars = base_spark_pom_version
clusterid = ''
-build_profiles = 'databricks,!snapshot-shims'
+build_profiles = ''
jar_path = ''
# `spark_conf` can take comma-separated multiple spark configurations, e.g., spark.foo=1,spark.bar=2,...
spark_conf = ''
jenkins/databricks/run-build.py: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ def main():
print("rsync command: %s" % rsync_command)
subprocess.check_call(rsync_command, shell = True)

ssh_command = "bash -c 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ubuntu@%s -p 2200 -i %s %s %s %s %s %s 2>&1 | tee buildout; if [ `echo ${PIPESTATUS[0]}` -ne 0 ]; then false; else true; fi'" % (master_addr, params.private_key_file, params.script_dest, params.tgz_dest, params.base_spark_pom_version, params.build_profiles, params.base_spark_version_to_install_databricks_jars)
ssh_command = "bash -c 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ubuntu@%s -p 2200 -i %s %s %s %s %s %s 2>&1 | tee buildout; if [ `echo ${PIPESTATUS[0]}` -ne 0 ]; then false; else true; fi'" % (master_addr, params.private_key_file, params.script_dest, params.tgz_dest, params.base_spark_pom_version, params.base_spark_version_to_install_databricks_jars, params.build_profiles)
print("ssh command: %s" % ssh_command)
subprocess.check_call(ssh_command, shell = True)
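
A hedged illustration of the inner ssh invocation this now produces (host, key path and tarball destination are placeholders; versions use the params.py defaults): the remote build.sh receives the tarball, base Spark version and install-jars version as $1..$3, and because params.build_profiles is now an empty string the trailing %s expands to nothing, so $4 (MVN_OPT) simply falls back to its '' default.

ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ubuntu@<master_addr> -p 2200 \
    -i <private_key_file> /home/ubuntu/build.sh /home/ubuntu/spark-rapids-ci.tgz 3.1.1 3.1.1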
