diff --git a/Jenkinsfile b/Jenkinsfile
index 9aacc64123e1..17ada9be442a 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -67,7 +67,7 @@ pipeline {
         'build-cpu-non-omp': { BuildCPUNonOmp() },
         'build-gpu-cuda10.0': { BuildCUDA(cuda_version: '10.0') },
         'build-gpu-cuda10.1': { BuildCUDA(cuda_version: '10.1') },
-        'build-jvm-packages': { BuildJVMPackages(spark_version: '2.4.3') },
+        'build-jvm-packages': { BuildJVMPackages(spark_version: '3.0.0') },
         'build-jvm-doc': { BuildJVMDoc() }
        ])
       }
@@ -85,7 +85,7 @@
         'test-python-mgpu-cuda10.1': { TestPythonGPU(cuda_version: '10.1', multi_gpu: true) },
         'test-cpp-gpu': { TestCppGPU(cuda_version: '10.1') },
         'test-cpp-mgpu': { TestCppGPU(cuda_version: '10.1', multi_gpu: true) },
-        'test-jvm-jdk8': { CrossTestJVMwithJDK(jdk_version: '8', spark_version: '2.4.3') },
+        'test-jvm-jdk8': { CrossTestJVMwithJDK(jdk_version: '8', spark_version: '3.0.0') },
         'test-jvm-jdk11': { CrossTestJVMwithJDK(jdk_version: '11') },
         'test-jvm-jdk12': { CrossTestJVMwithJDK(jdk_version: '12') },
         'test-r-3.5.3': { TestR(use_r35: true) }
@@ -99,7 +99,7 @@
       steps {
         script {
          parallel ([
-          'deploy-jvm-packages': { DeployJVMPackages(spark_version: '2.4.3') }
+          'deploy-jvm-packages': { DeployJVMPackages(spark_version: '3.0.0') }
          ])
         }
         milestone ordinal: 5
diff --git a/tests/ci_build/Dockerfile.jvm_cross b/tests/ci_build/Dockerfile.jvm_cross
index 1e3c146e28cb..e143051c38eb 100644
--- a/tests/ci_build/Dockerfile.jvm_cross
+++ b/tests/ci_build/Dockerfile.jvm_cross
@@ -1,6 +1,6 @@
 FROM ubuntu:18.04
 ARG JDK_VERSION=8
-ARG SPARK_VERSION=2.4.3
+ARG SPARK_VERSION=3.0.0
 
 # Environment
 ENV DEBIAN_FRONTEND noninteractive
@@ -21,10 +21,9 @@ RUN \
     tar xvf apache-maven-3.6.1-bin.tar.gz -C /opt && \
     ln -s /opt/apache-maven-3.6.1/ /opt/maven && \
     # Spark
-    # This should be: wget https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-scala2.12-hadoop2.7.tgz
-    /opt/python/bin/python -m awscli s3 cp s3://xgboost-ci-jenkins-artifacts/spark-$SPARK_VERSION-bin-scala2.12-hadoop2.7.tgz . && \
-    tar xvf spark-$SPARK_VERSION-bin-scala2.12-hadoop2.7.tgz -C /opt && \
-    ln -s /opt/spark-$SPARK_VERSION-bin-scala2.12-hadoop2.7 /opt/spark
+    wget https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop2.7.tgz && \
+    tar xvf spark-$SPARK_VERSION-bin-hadoop2.7.tgz -C /opt && \
+    ln -s /opt/spark-$SPARK_VERSION-bin-hadoop2.7 /opt/spark
 
 ENV PATH=/opt/python/bin:/opt/spark/bin:/opt/maven/bin:$PATH