diff --git a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst
index fe7b443bb5c8fa..3ad8c48180045f 100644
--- a/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst
+++ b/docs/apache-airflow-providers-google/operators/transfer/sheets_to_gcs.rst
@@ -38,7 +38,7 @@ Upload data from Google Sheets to GCS
 To upload data from Google Spreadsheet to Google Cloud Storage you can use the
 :class:`~airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`.
 
-.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
+.. exampleinclude:: /../../tests/system/providers/google/sheets/example_sheets_to_gcs.py
     :language: python
     :dedent: 4
     :start-after: [START upload_sheet_to_gcs]
diff --git a/tests/providers/google/cloud/transfers/test_sheets_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_sheets_to_gcs_system.py
deleted file mode 100644
index 529b26f078ca28..00000000000000
--- a/tests/providers/google/cloud/transfers/test_sheets_to_gcs_system.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import pytest
-
-from airflow.providers.google.cloud.example_dags.example_sheets_to_gcs import BUCKET
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY
-from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
-
-
-@pytest.mark.backend("mysql", "postgres")
-@pytest.mark.credential_file(GCP_GCS_KEY)
-class GoogleSheetsToGCSExampleDagsSystemTest(GoogleSystemTest):
-    @provide_gcp_context(GCP_GCS_KEY)
-    def setUp(self):
-        super().setUp()
-        self.create_gcs_bucket(BUCKET)
-
-    @provide_gcp_context(GCP_GCS_KEY)
-    def test_run_example_dag_function(self):
-        self.run_dag('example_sheets_to_gcs', CLOUD_DAG_FOLDER)
-
-    @provide_gcp_context(GCP_GCS_KEY)
-    def tearDown(self):
-        self.delete_gcs_bucket(BUCKET)
-        super().tearDown()
diff --git a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py b/tests/system/providers/google/sheets/example_sheets_to_gcs.py
similarity index 52%
rename from airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
rename to tests/system/providers/google/sheets/example_sheets_to_gcs.py
index 0741fa0e3d332d..310131717df53c 100644
--- a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
+++ b/tests/system/providers/google/sheets/example_sheets_to_gcs.py
@@ -20,22 +20,56 @@
 from datetime import datetime
 
 from airflow import models
+from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator
+from airflow.utils.trigger_rule import TriggerRule
 
-BUCKET = os.environ.get("GCP_GCS_BUCKET", "test28397yeo")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+DAG_ID = "example_sheets_to_gcs"
+
+BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
 SPREADSHEET_ID = os.environ.get("SPREADSHEET_ID", "1234567890qwerty")
 
 with models.DAG(
-    "example_sheets_to_gcs",
+    DAG_ID,
     schedule_interval='@once',  # Override to match your needs
     start_date=datetime(2021, 1, 1),
     catchup=False,
-    tags=["example"],
+    tags=["example", "sheets"],
 ) as dag:
+    create_bucket = GCSCreateBucketOperator(
+        task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
+    )
+
     # [START upload_sheet_to_gcs]
     upload_sheet_to_gcs = GoogleSheetsToGCSOperator(
         task_id="upload_sheet_to_gcs",
-        destination_bucket=BUCKET,
+        destination_bucket=BUCKET_NAME,
         spreadsheet_id=SPREADSHEET_ID,
     )
     # [END upload_sheet_to_gcs]
+
+    delete_bucket = GCSDeleteBucketOperator(
+        task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
+    )
+
+    (
+        # TEST SETUP
+        create_bucket
+        # TEST BODY
+        >> upload_sheet_to_gcs
+        # TEST TEARDOWN
+        >> delete_bucket
+    )
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
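
Note for reviewers: a minimal sketch of exercising the migrated example through pytest, following the "run_via_pytest" comment added at the end of the DAG file. The module path and SYSTEM_TESTS_* variable names come from the diff above; the snippet is illustrative only, since an end-to-end run still requires GCP credentials and a reachable spreadsheet.

    # Sketch only: tests/system/README.md#run_via_pytest describes the supported flow.
    # Assumes it runs from the Airflow source root with SYSTEM_TESTS_ENV_ID,
    # SYSTEM_TESTS_GCP_PROJECT and SPREADSHEET_ID exported in the environment.
    import pytest

    # The module exposes `test_run = get_test_run(dag)`, so pytest can collect
    # and execute the whole DAG (setup, body, teardown) as a single test.
    pytest.main(["tests/system/providers/google/sheets/example_sheets_to_gcs.py"])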