diff --git a/.gitignore b/.gitignore
index 844b225cc521e..52ee9871dd381 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+.cache/
 build/
 dcos-commons-tools/
 tests/env
diff --git a/bin/jenkins-dist-publish.sh b/bin/jenkins-dist-publish.sh
index e2115d7aa3fa9..2b75137d0465a 100755
--- a/bin/jenkins-dist-publish.sh
+++ b/bin/jenkins-dist-publish.sh
@@ -34,8 +34,6 @@ SPARK_VERSION=${GIT_BRANCH#origin/tags/custom-} # e.g. "2.0.2"
 source "${DIR}/jenkins.sh"
 
 pushd "${SPARK_BUILD_DIR}"
-install_cli
 docker_login
-# build_and_test
 publish_dists
 popd
diff --git a/bin/jenkins-dist-test.sh b/bin/jenkins-dist-test.sh
index 169eff770bac9..938c04e5bcef4 100755
--- a/bin/jenkins-dist-test.sh
+++ b/bin/jenkins-dist-test.sh
@@ -8,7 +8,6 @@ SPARK_DIR="${DIR}/../../spark"
 
 function run() {
     source bin/jenkins.sh
-    install_cli
     docker_login
     build_and_test
 }
diff --git a/bin/jenkins-package-publish.sh b/bin/jenkins-package-publish.sh
index 4d877287eca77..8ff92644552e3 100755
--- a/bin/jenkins-package-publish.sh
+++ b/bin/jenkins-package-publish.sh
@@ -36,7 +36,6 @@ source "${DIR}/jenkins.sh"
 
 pushd "${SPARK_BUILD_DIR}"
 SPARK_VERSION=$(jq -r ".spark_version" manifest.json)
-install_cli
 docker_login
 publish_docker_images
 make_universe
diff --git a/bin/jenkins-package-test.sh b/bin/jenkins-package-test.sh
index ba885254d4ae3..367c799699daf 100755
--- a/bin/jenkins-package-test.sh
+++ b/bin/jenkins-package-test.sh
@@ -7,9 +7,7 @@ SPARK_BUILD_DIR=${DIR}/..
 
 function run() {
     source bin/jenkins.sh
-    install_cli
     docker_login
-    make --directory=dispatcher universe
     export $(cat "${WORKSPACE}/stub-universe.properties")
     make test
 
diff --git a/bin/jenkins.sh b/bin/jenkins.sh
index 869730cdb07fc..d9b2f43059a63 100644
--- a/bin/jenkins.sh
+++ b/bin/jenkins.sh
@@ -112,20 +112,6 @@ function docker_version() {
     echo "${SPARK_BUILD_VERSION}-hadoop-$1"
 }
 
-function install_cli {
-    curl -O https://downloads.mesosphere.io/dcos-cli/install.sh
-    rm -rf dcos-cli/
-    mkdir dcos-cli
-    bash install.sh dcos-cli http://change.me --add-path no
-    source dcos-cli/bin/env-setup
-
-    # hack because the installer forces an old CLI version
-    pip install -U dcoscli
-
-    # needed in `make test`
-    pip3 install jsonschema
-}
-
 function docker_login {
     docker login --email=docker@mesosphere.io --username="${DOCKER_USERNAME}" --password="${DOCKER_PASSWORD}"
 }
diff --git a/bin/test.sh b/bin/test.sh
index 7714a178d9e97..d7ad28bc80cde 100755
--- a/bin/test.sh
+++ b/bin/test.sh
@@ -7,6 +7,7 @@ set -x
 set -o pipefail
 
 BIN_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SPARK_BUILD_DIR="${BIN_DIR}/.."
 
 check_env() {
     # Check env early, before starting the cluster:
@@ -36,16 +37,6 @@ start_cluster() {
     fi
 }
 
-configure_cli() {
-    dcos config set core.dcos_url "${DCOS_URL}"
-    dcos config set core.ssl_verify false
-    ${COMMONS_DIR}/tools/dcos_login.py
-    dcos config show
-    if [ -n "${STUB_UNIVERSE_URL}" ]; then
-        dcos package repo add --index=0 spark-test "${STUB_UNIVERSE_URL}"
-    fi
-}
-
 initialize_service_account() {
     if [ "$SECURITY" = "strict" ]; then
         ${COMMONS_DIR}/tools/create_service_account.sh --strict
@@ -59,21 +50,15 @@ build_scala_test_jar() {
 }
 
 run_tests() {
-    pushd tests
-    if [[ ! -d venv ]]; then
-        virtualenv -p python3 venv
-    fi
-    source venv/bin/activate
-    pip install -r requirements.txt
-    SCALA_TEST_JAR_PATH=$(pwd)/jobs/scala/target/scala-2.11/dcos-spark-scala-tests-assembly-0.1-SNAPSHOT.jar \
-        py.test -s test.py
-    popd
+    SCALA_TEST_JAR_PATH=${SPARK_BUILD_DIR}/tests/jobs/scala/target/scala-2.11/dcos-spark-scala-tests-assembly-0.1-SNAPSHOT.jar \
+    CLUSTER_URL=${DCOS_URL} \
+    STUB_UNIVERSE_URL=${STUB_UNIVERSE_URL} \
+    ${COMMONS_DIR}/tools/run_tests.py shakedown ${SPARK_BUILD_DIR}/tests ${SPARK_BUILD_DIR}/tests/requirements.txt
 }
 
 check_env
 start_cluster
 # TODO: Migrate the following three commands to dcos-commons-tools/run-tests.py
-configure_cli
 initialize_service_account
 build_scala_test_jar
 run_tests
diff --git a/tests/test.py b/tests/test_spark.py
similarity index 91%
rename from tests/test.py
rename to tests/test_spark.py
index 26f451a4016e5..7ee64b73bb2f6 100644
--- a/tests/test.py
+++ b/tests/test_spark.py
@@ -25,11 +25,16 @@ def _init_logging():
     logging.basicConfig(level=logging.INFO)
     logging.getLogger('dcos').setLevel(logging.WARNING)
+    logging.getLogger('requests').setLevel(logging.WARNING)
 
 
 _init_logging()
 
 LOGGER = logging.getLogger(__name__)
 THIS_DIR = os.path.dirname(os.path.abspath(__file__))
+DEFAULT_HDFS_TASK_COUNT=10
+HDFS_PACKAGE_NAME='beta-hdfs'
+HDFS_SERVICE_NAME='hdfs'
+SPARK_PACKAGE_NAME='spark'
 
 
 def setup_module(module):
@@ -39,12 +44,13 @@ def setup_module(module):
 
 
 def teardown_module(module):
-    shakedown.uninstall_package_and_wait('spark')
+    shakedown.uninstall_package_and_wait(SPARK_PACKAGE_NAME)
     if _hdfs_enabled():
-        shakedown.uninstall_package_and_wait('hdfs')
-        _run_janitor('hdfs')
+        shakedown.uninstall_package_and_wait(HDFS_PACKAGE_NAME, HDFS_SERVICE_NAME)
+        _run_janitor(HDFS_PACKAGE_NAME)
 
 
+@pytest.mark.sanity
 def test_jar():
     master_url = ("https" if _is_strict() else "http") + "://leader.mesos:5050"
     spark_job_runner_args = '{} dcos \\"*\\" spark:only 2 --auth-token={}'.format(
@@ -57,6 +63,7 @@ def test_jar():
                ["--class", 'com.typesafe.spark.test.mesos.framework.runners.SparkJobRunner'])
 
 
+@pytest.mark.sanity
 def test_teragen():
     if _hdfs_enabled():
         jar_url = "https://downloads.mesosphere.io/spark/examples/spark-terasort-1.0-jar-with-dependencies_2.11.jar"
@@ -66,6 +73,7 @@ def test_teragen():
                    ["--class", "com.github.ehiggs.spark.terasort.TeraGen"])
 
 
+@pytest.mark.sanity
 def test_python():
     python_script_path = os.path.join(THIS_DIR, 'jobs', 'python', 'pi_with_include.py')
     python_script_url = _upload_file(python_script_path)
@@ -100,6 +108,7 @@ def test_kerberos():
                "--conf", "sun.security.krb5.debug=true"])
 
 
+@pytest.mark.sanity
 def test_r():
     r_script_path = os.path.join(THIS_DIR, 'jobs', 'R', 'dataframe.R')
     r_script_url = _upload_file(r_script_path)
@@ -108,6 +117,7 @@ def test_r():
         "Justin")
 
 
+@pytest.mark.sanity
 def test_cni():
     SPARK_EXAMPLES="http://downloads.mesosphere.com/spark/assets/spark-examples_2.11-2.0.1.jar"
     _run_tests(SPARK_EXAMPLES,
@@ -117,6 +127,7 @@ def test_cni():
                "--class", "org.apache.spark.examples.SparkPi"])
 
 
+@pytest.mark.sanity
 def test_s3():
     linecount_path = os.path.join(THIS_DIR, 'resources', 'linecount.txt')
     s3.upload_file(linecount_path)
@@ -147,14 +158,14 @@ def _hdfs_enabled():
 
 
 def _require_hdfs():
     LOGGER.info("Ensuring HDFS is installed.")
 
-    _require_package('hdfs', _get_hdfs_options())
+    _require_package(HDFS_PACKAGE_NAME, _get_hdfs_options())
     _wait_for_hdfs()
 
 
 def _require_spark():
     LOGGER.info("Ensuring Spark is installed.")
 
-    _require_package('spark', _get_spark_options())
+    _require_package(SPARK_PACKAGE_NAME, _get_spark_options())
     _wait_for_spark()
@@ -187,7 +198,9 @@ def _get_hdfs_options():
     if _is_strict():
         options = {'service': {'principal': 'service-acct', 'secret_name': 'secret'}}
     else:
-        options = {}
+        options = {"service": {}}
+
+    options["service"]["beta-optin"] = True
 
     return options
 
@@ -195,9 +208,8 @@ def _wait_for_hdfs():
     shakedown.wait_for(_is_hdfs_ready, ignore_exceptions=False, timeout_seconds=900)
 
 
-DEFAULT_HDFS_TASK_COUNT=10
 def _is_hdfs_ready(expected_tasks = DEFAULT_HDFS_TASK_COUNT):
-    running_tasks = [t for t in shakedown.get_service_tasks('hdfs') \
+    running_tasks = [t for t in shakedown.get_service_tasks(HDFS_SERVICE_NAME) \
                      if t['state'] == 'TASK_RUNNING']
     return len(running_tasks) >= expected_tasks
 
@@ -231,7 +243,10 @@ def _install_spark():
                                "principal": "service-acct"}
         options['security'] = {"mesos": {"authentication": {"secret_name": "secret"}}}
 
-    shakedown.install_package('spark', options_json=options, wait_for_completion=True)
+    shakedown.install_package(
+        SPARK_PACKAGE_NAME,
+        options_json=options,
+        wait_for_completion=True)
 
     def pred():
         dcos_url = dcos.config.get_config_val("core.dcos_url")