diff --git a/.circleci/config.yml b/.circleci/config.yml index b2f007e..73c8348 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -2,25 +2,10 @@ version: 2.1 jobs: - integration-tests: + integration-tests-core: docker: - image: cimg/python:3.9.9 - image: cimg/postgres:14.0 - - image: godatadriven/spark:3.1.1 - ports: - - "10000:10000" - - "4040:4040" - depends_on: - - dbt-hive-metastore - command: > - --class org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 - --name Thrift JDBC/ODBC Server - volumes: - - ./.spark-warehouse/:/spark-warehouse/ - - ./docker/hive-site.xml:/usr/spark/conf/hive-site.xml - - ./docker/spark-defaults.conf:/usr/spark/conf/spark-defaults.conf - environment: - - WAIT_FOR=dbt-hive-metastore:5432 resource_class: small @@ -39,7 +24,8 @@ jobs: . venv/bin/activate pip install -U pip setuptools wheel pip install docker-compose - pip install dbt-core==$DBT_VERSION dbt-postgres==$DBT_VERSION dbt-bigquery==$DBT_VERSION dbt-snowflake==$DBT_VERSION dbt-duckdb==$DBT_VERSION + pip install dbt-core==$DBT_VERSION dbt-postgres==$DBT_VERSION dbt-bigquery==$DBT_VERSION dbt-snowflake==$DBT_VERSION + pip install dbt-duckdb==$DBT_VERSION - run: name: Install dbt dependencies @@ -87,6 +73,54 @@ jobs: . 
venv/bin/activate dbt build -t duckdb --project-dir $DBT_PROJECT_DIR + - store_artifacts: + path: ./logs + + integration-tests-spark-thrift: + docker: + - image: cimg/python:3.9.9 + - image: godatadriven/spark:3.1.1 + environment: + WAIT_FOR: localhost:5432 + command: > + --class org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 + --name "Thrift JDBC/ODBC Server" + - image: postgres:9.6.17-alpine + environment: + POSTGRES_USER: dbt + POSTGRES_PASSWORD: dbt + POSTGRES_DB: metastore + resource_class: small + + environment: + DBT_PROFILES_DIR: ./integration_tests/ci + DBT_PROJECT_DIR: ./integration_tests + DBT_VERSION: 1.6.0 + + steps: + - checkout + - run: + name: Install Ubuntu dev packages + command: | + sudo apt-get update + sudo apt-get install -y libsasl2-dev libsasl2-2 + - run: + name: Install Python packages + command: | + python3 -m venv venv + . venv/bin/activate + pip install -U pip setuptools wheel + pip install dbt-core==$DBT_VERSION + pip install dbt-spark "dbt-spark[PyHive]" + + - run: + name: Install dbt dependencies + command: | + . venv/bin/activate + dbt deps --project-dir $DBT_PROJECT_DIR + - run: + name: Wait for Spark-Thrift + command: dockerize -wait tcp://localhost:10000 -timeout 15m -wait-retry-interval 5s - run: name: "Run Tests - Spark" command: | @@ -102,6 +136,9 @@ workflows: jobs: - hold: type: approval - - integration-tests: + # - integration-tests-core: + # requires: + # - hold + - integration-tests-spark-thrift: requires: - hold