diff --git a/jenkins/spark-tests.sh b/jenkins/spark-tests.sh
index 0a455afcb10..8c7db3479eb 100755
--- a/jenkins/spark-tests.sh
+++ b/jenkins/spark-tests.sh
@@ -304,10 +304,17 @@ if [[ $TEST_MODE == "DEFAULT" ]]; then
     PYSP_TEST_spark_shuffle_manager=com.nvidia.spark.rapids.${SHUFFLE_SPARK_SHIM}.RapidsShuffleManager \
     ./run_pyspark_from_build.sh
 
-  SPARK_SHELL_SMOKE_TEST=1 \
-    PYSP_TEST_spark_jars_packages=com.nvidia:rapids-4-spark_${SCALA_BINARY_VER}:${PROJECT_VER} \
-    PYSP_TEST_spark_jars_repositories=${PROJECT_REPO} \
-    ./run_pyspark_from_build.sh
+  # As '--packages' only works on the default cuda11 jar, it does not support classifiers
+  # refer to issue : https://issues.apache.org/jira/browse/SPARK-20075
+  # "$CLASSIFIER" == "" is usually for the case running by developers,
+  # while "$CLASSIFIER" == "cuda11" is for the case running on CI.
+  # We expect to run packages test for both cases
+  if [[ "$CLASSIFIER" == "" || "$CLASSIFIER" == "cuda11" ]]; then
+    SPARK_SHELL_SMOKE_TEST=1 \
+      PYSP_TEST_spark_jars_packages=com.nvidia:rapids-4-spark_${SCALA_BINARY_VER}:${PROJECT_VER} \
+      PYSP_TEST_spark_jars_repositories=${PROJECT_REPO} \
+      ./run_pyspark_from_build.sh
+  fi
 
   # ParquetCachedBatchSerializer cache_test
   PYSP_TEST_spark_sql_cache_serializer=com.nvidia.spark.ParquetCachedBatchSerializer \