diff --git a/airflow/contrib/hooks/sagemaker_hook.py b/airflow/contrib/hooks/sagemaker_hook.py
index 1b91e74368475c..9106cba8927370 100644
--- a/airflow/contrib/hooks/sagemaker_hook.py
+++ b/airflow/contrib/hooks/sagemaker_hook.py
@@ -28,7 +28,7 @@
 from airflow.contrib.hooks.aws_hook import AwsHook
 from airflow.contrib.hooks.aws_logs_hook import AwsLogsHook
 from airflow.exceptions import AirflowException
-from airflow.hooks.S3_hook import S3Hook
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.utils import timezone
diff --git a/airflow/contrib/hooks/wasb_hook.py b/airflow/contrib/hooks/wasb_hook.py
index b1eef4aba6ec17..08262fcd0a3757 100644
--- a/airflow/contrib/hooks/wasb_hook.py
+++ b/airflow/contrib/hooks/wasb_hook.py
@@ -102,7 +102,7 @@ def load_file(self, file_path, container_name, blob_name, **kwargs):
             `BlockBlobService.create_blob_from_path()` takes.
         :type kwargs: object
         """
-        # Reorder the argument order from airflow.hooks.S3_hook.load_file.
+        # Reorder the argument order from airflow.providers.aws.hooks.s3.load_file.
         self.connection.create_blob_from_path(container_name, blob_name, file_path, **kwargs)
@@ -120,7 +120,7 @@ def load_string(self, string_data, container_name, blob_name, **kwargs):
             `BlockBlobService.create_blob_from_text()` takes.
         :type kwargs: object
         """
-        # Reorder the argument order from airflow.hooks.S3_hook.load_string.
+        # Reorder the argument order from airflow.providers.aws.hooks.s3.load_string.
         self.connection.create_blob_from_text(container_name, blob_name, string_data, **kwargs)
diff --git a/airflow/contrib/operators/dynamodb_to_s3.py b/airflow/contrib/operators/dynamodb_to_s3.py
index 22b86a3c39279a..f9058bdb04ea14 100644
--- a/airflow/contrib/operators/dynamodb_to_s3.py
+++ b/airflow/contrib/operators/dynamodb_to_s3.py
@@ -32,8 +32,8 @@
 from boto.compat import json  # type: ignore
 from airflow.contrib.hooks.aws_dynamodb_hook import AwsDynamoDBHook
-from airflow.hooks.S3_hook import S3Hook
 from airflow.models.baseoperator import BaseOperator
+from airflow.providers.aws.hooks.s3 import S3Hook
 def _convert_item_to_json_bytes(item):
diff --git a/airflow/contrib/operators/imap_attachment_to_s3_operator.py b/airflow/contrib/operators/imap_attachment_to_s3_operator.py
index 8e0b9c9c4f9744..170019c1a952f2 100644
--- a/airflow/contrib/operators/imap_attachment_to_s3_operator.py
+++ b/airflow/contrib/operators/imap_attachment_to_s3_operator.py
@@ -20,8 +20,8 @@
 This module allows you to transfer mail attachments from a mail server into s3 bucket.
""" from airflow.contrib.hooks.imap_hook import ImapHook -from airflow.hooks.S3_hook import S3Hook from airflow.models import BaseOperator +from airflow.providers.aws.hooks.s3 import S3Hook from airflow.utils.decorators import apply_defaults diff --git a/airflow/contrib/operators/mongo_to_s3.py b/airflow/contrib/operators/mongo_to_s3.py index 72d2a0c5b79e6d..ca9005d1a16a17 100644 --- a/airflow/contrib/operators/mongo_to_s3.py +++ b/airflow/contrib/operators/mongo_to_s3.py @@ -21,8 +21,8 @@ from bson import json_util from airflow.contrib.hooks.mongo_hook import MongoHook -from airflow.hooks.S3_hook import S3Hook from airflow.models import BaseOperator +from airflow.providers.aws.hooks.s3 import S3Hook from airflow.utils.decorators import apply_defaults diff --git a/airflow/contrib/operators/s3_copy_object_operator.py b/airflow/contrib/operators/s3_copy_object_operator.py index 3c4e99fea29791..b228610af6a6f4 100644 --- a/airflow/contrib/operators/s3_copy_object_operator.py +++ b/airflow/contrib/operators/s3_copy_object_operator.py @@ -17,8 +17,8 @@ # specific language governing permissions and limitations # under the License. -from airflow.hooks.S3_hook import S3Hook from airflow.models import BaseOperator +from airflow.providers.aws.hooks.s3 import S3Hook from airflow.utils.decorators import apply_defaults diff --git a/airflow/contrib/operators/s3_delete_objects_operator.py b/airflow/contrib/operators/s3_delete_objects_operator.py index e22e60d29aa261..56a7927602a027 100644 --- a/airflow/contrib/operators/s3_delete_objects_operator.py +++ b/airflow/contrib/operators/s3_delete_objects_operator.py @@ -18,8 +18,8 @@ # under the License. from airflow.exceptions import AirflowException -from airflow.hooks.S3_hook import S3Hook from airflow.models import BaseOperator +from airflow.providers.aws.hooks.s3 import S3Hook from airflow.utils.decorators import apply_defaults diff --git a/airflow/contrib/operators/s3_list_operator.py b/airflow/contrib/operators/s3_list_operator.py index b47374c4b21ce1..7dd0f8081a0675 100644 --- a/airflow/contrib/operators/s3_list_operator.py +++ b/airflow/contrib/operators/s3_list_operator.py @@ -19,8 +19,8 @@ from typing import Iterable -from airflow.hooks.S3_hook import S3Hook from airflow.models import BaseOperator +from airflow.providers.aws.hooks.s3 import S3Hook from airflow.utils.decorators import apply_defaults diff --git a/airflow/contrib/operators/s3_to_gcs_operator.py b/airflow/contrib/operators/s3_to_gcs_operator.py index 0c86e481da95d2..3e8404fd780784 100644 --- a/airflow/contrib/operators/s3_to_gcs_operator.py +++ b/airflow/contrib/operators/s3_to_gcs_operator.py @@ -22,7 +22,7 @@ from airflow.contrib.operators.s3_list_operator import S3ListOperator from airflow.exceptions import AirflowException from airflow.gcp.hooks.gcs import GoogleCloudStorageHook, _parse_gcs_url -from airflow.hooks.S3_hook import S3Hook +from airflow.providers.aws.hooks.s3 import S3Hook from airflow.utils.decorators import apply_defaults diff --git a/airflow/contrib/operators/s3_to_sftp_operator.py b/airflow/contrib/operators/s3_to_sftp_operator.py index 9fc95788f8091e..f86150af360b46 100644 --- a/airflow/contrib/operators/s3_to_sftp_operator.py +++ b/airflow/contrib/operators/s3_to_sftp_operator.py @@ -21,8 +21,8 @@ from urllib.parse import urlparse from airflow.contrib.hooks.ssh_hook import SSHHook -from airflow.hooks.S3_hook import S3Hook from airflow.models import BaseOperator +from airflow.providers.aws.hooks.s3 import S3Hook from airflow.utils.decorators import apply_defaults 
diff --git a/airflow/contrib/operators/sftp_to_s3_operator.py b/airflow/contrib/operators/sftp_to_s3_operator.py
index 5dabb3bfc168cc..b88cdb107c4f1d 100644
--- a/airflow/contrib/operators/sftp_to_s3_operator.py
+++ b/airflow/contrib/operators/sftp_to_s3_operator.py
@@ -21,8 +21,8 @@
 from urllib.parse import urlparse
 from airflow.contrib.hooks.ssh_hook import SSHHook
-from airflow.hooks.S3_hook import S3Hook
 from airflow.models import BaseOperator
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/gcs_to_s3.py b/airflow/operators/gcs_to_s3.py
index b4b41c5fac8168..b3484c9a4fe9ae 100644
--- a/airflow/operators/gcs_to_s3.py
+++ b/airflow/operators/gcs_to_s3.py
@@ -23,7 +23,7 @@
 from airflow.gcp.hooks.gcs import GoogleCloudStorageHook
 from airflow.gcp.operators.gcs import GoogleCloudStorageListOperator
-from airflow.hooks.S3_hook import S3Hook
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/google_api_to_s3_transfer.py b/airflow/operators/google_api_to_s3_transfer.py
index d05ff4a42b372a..16d45dcab0ec5c 100644
--- a/airflow/operators/google_api_to_s3_transfer.py
+++ b/airflow/operators/google_api_to_s3_transfer.py
@@ -24,9 +24,9 @@
 import sys
 from airflow.gcp.hooks.discovery_api import GoogleDiscoveryApiHook
-from airflow.hooks.S3_hook import S3Hook
 from airflow.models import BaseOperator
 from airflow.models.xcom import MAX_XCOM_SIZE
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/redshift_to_s3_operator.py b/airflow/operators/redshift_to_s3_operator.py
index 2aac34991fb973..ec47f4eaaa8ee4 100644
--- a/airflow/operators/redshift_to_s3_operator.py
+++ b/airflow/operators/redshift_to_s3_operator.py
@@ -22,8 +22,8 @@
 from typing import List, Optional, Union
 from airflow.hooks.postgres_hook import PostgresHook
-from airflow.hooks.S3_hook import S3Hook
 from airflow.models import BaseOperator
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/operators/s3_file_transform_operator.py b/airflow/operators/s3_file_transform_operator.py
index bfcb733478f66a..d4d46c3d6cb64a 100644
--- a/airflow/operators/s3_file_transform_operator.py
+++ b/airflow/operators/s3_file_transform_operator.py
@@ -23,8 +23,8 @@
 from typing import Optional, Union
 from airflow.exceptions import AirflowException
-from airflow.hooks.S3_hook import S3Hook
 from airflow.models import BaseOperator
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.utils.decorators import apply_defaults
@@ -111,10 +111,8 @@ def execute(self, context):
             raise AirflowException(
                 "Either transform_script or select_expression must be specified")
-        source_s3 = S3Hook(aws_conn_id=self.source_aws_conn_id,
-                           verify=self.source_verify)
-        dest_s3 = S3Hook(aws_conn_id=self.dest_aws_conn_id,
-                         verify=self.dest_verify)
+        source_s3 = S3Hook(aws_conn_id=self.source_aws_conn_id, verify=self.source_verify)
+        dest_s3 = S3Hook(aws_conn_id=self.dest_aws_conn_id, verify=self.dest_verify)
         self.log.info("Downloading source S3 file %s", self.source_s3_key)
         if not source_s3.check_for_key(self.source_s3_key):
diff --git a/airflow/operators/s3_to_hive_operator.py b/airflow/operators/s3_to_hive_operator.py
index 9c174e25849ae7..d2dfc6c42262f1 100644
--- a/airflow/operators/s3_to_hive_operator.py
+++ b/airflow/operators/s3_to_hive_operator.py
@@ -26,8 +26,8 @@
 from airflow.exceptions import AirflowException
 from airflow.hooks.hive_hooks import HiveCliHook
-from airflow.hooks.S3_hook import S3Hook
 from airflow.models import BaseOperator
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.utils.compression import uncompress_file
 from airflow.utils.decorators import apply_defaults
 from airflow.utils.file import TemporaryDirectory
diff --git a/airflow/operators/s3_to_redshift_operator.py b/airflow/operators/s3_to_redshift_operator.py
index 0b393edfca1cb6..958bcf8818e767 100644
--- a/airflow/operators/s3_to_redshift_operator.py
+++ b/airflow/operators/s3_to_redshift_operator.py
@@ -19,8 +19,8 @@
 from typing import List, Optional, Union
 from airflow.hooks.postgres_hook import PostgresHook
-from airflow.hooks.S3_hook import S3Hook
 from airflow.models import BaseOperator
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.utils.decorators import apply_defaults
diff --git a/airflow/sensors/s3_key_sensor.py b/airflow/sensors/s3_key_sensor.py
index 3eca9784ad5d0c..c0a95f96e48184 100644
--- a/airflow/sensors/s3_key_sensor.py
+++ b/airflow/sensors/s3_key_sensor.py
@@ -88,7 +88,7 @@ def __init__(self,
         self.verify = verify
     def poke(self, context):
-        from airflow.hooks.S3_hook import S3Hook
+        from airflow.providers.aws.hooks.s3 import S3Hook
         hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
         self.log.info('Poking for key : s3://%s/%s', self.bucket_name, self.bucket_key)
         if self.wildcard_match:
diff --git a/airflow/sensors/s3_prefix_sensor.py b/airflow/sensors/s3_prefix_sensor.py
index 18db735d10cfab..63d2f668d6f596 100644
--- a/airflow/sensors/s3_prefix_sensor.py
+++ b/airflow/sensors/s3_prefix_sensor.py
@@ -73,7 +73,7 @@ def __init__(self,
     def poke(self, context):
         self.log.info('Poking for prefix : %s in bucket s3://%s', self.prefix, self.bucket_name)
-        from airflow.hooks.S3_hook import S3Hook
+        from airflow.providers.aws.hooks.s3 import S3Hook
         hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
         return hook.check_for_prefix(
             prefix=self.prefix,
diff --git a/airflow/utils/log/s3_task_handler.py b/airflow/utils/log/s3_task_handler.py
index aa7f608d6974cf..f168a95857633a 100644
--- a/airflow/utils/log/s3_task_handler.py
+++ b/airflow/utils/log/s3_task_handler.py
@@ -43,7 +43,7 @@ def hook(self):
         remote_conn_id = conf.get('core', 'REMOTE_LOG_CONN_ID')
         try:
-            from airflow.hooks.S3_hook import S3Hook
+            from airflow.providers.aws.hooks.s3 import S3Hook
             return S3Hook(remote_conn_id)
         except Exception:
             self.log.error(
diff --git a/tests/contrib/hooks/test_sagemaker_hook.py b/tests/contrib/hooks/test_sagemaker_hook.py
index 2d9d26b2ed79da..b77e94ccbef19c 100644
--- a/tests/contrib/hooks/test_sagemaker_hook.py
+++ b/tests/contrib/hooks/test_sagemaker_hook.py
@@ -29,7 +29,7 @@
     LogState, SageMakerHook, secondary_training_status_changed, secondary_training_status_message,
 )
 from airflow.exceptions import AirflowException
-from airflow.hooks.S3_hook import S3Hook
+from airflow.providers.aws.hooks.s3 import S3Hook
 from tests.compat import mock
 role = 'arn:aws:iam:role/test-role'
diff --git a/tests/contrib/operators/test_s3_to_sftp_operator.py b/tests/contrib/operators/test_s3_to_sftp_operator.py
index 829e57c4d33f66..904781ff50b6d5 100644
--- a/tests/contrib/operators/test_s3_to_sftp_operator.py
+++ b/tests/contrib/operators/test_s3_to_sftp_operator.py
@@ -61,7 +61,7 @@ class TestS3ToSFTPOperator(unittest.TestCase):
     @mock_s3
     def setUp(self):
         from airflow.contrib.hooks.ssh_hook import SSHHook
-        from airflow.hooks.S3_hook import S3Hook
+        from airflow.providers.aws.hooks.s3 import S3Hook
         hook = SSHHook(ssh_conn_id='ssh_default')
         s3_hook = S3Hook('aws_default')
diff --git a/tests/contrib/operators/test_sftp_to_s3_operator.py b/tests/contrib/operators/test_sftp_to_s3_operator.py
index 1e444ecb76b005..0fb4b6fcb58c25 100644
--- a/tests/contrib/operators/test_sftp_to_s3_operator.py
+++ b/tests/contrib/operators/test_sftp_to_s3_operator.py
@@ -26,8 +26,8 @@
 from airflow.contrib.hooks.ssh_hook import SSHHook
 from airflow.contrib.operators.sftp_to_s3_operator import SFTPToS3Operator
 from airflow.contrib.operators.ssh_operator import SSHOperator
-from airflow.hooks.S3_hook import S3Hook
 from airflow.models import DAG, TaskInstance
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.settings import Session
 from airflow.utils import timezone
 from airflow.utils.timezone import datetime
diff --git a/tests/hooks/test_s3_hook.py b/tests/hooks/test_s3_hook.py
index a06b17b4ec0033..c8709416ff8261 100644
--- a/tests/hooks/test_s3_hook.py
+++ b/tests/hooks/test_s3_hook.py
@@ -23,11 +23,11 @@
 from botocore.exceptions import NoCredentialsError
-from airflow.hooks.S3_hook import provide_bucket_name
 from airflow.models import Connection
+from airflow.providers.aws.hooks.s3 import provide_bucket_name
 try:
-    from airflow.hooks.S3_hook import S3Hook
+    from airflow.providers.aws.hooks.s3 import S3Hook
 except ImportError:
     S3Hook = None  # type: ignore
diff --git a/tests/operators/test_gcs_to_s3.py b/tests/operators/test_gcs_to_s3.py
index 4e9d2d2a8df5a0..a8858fc1d0c795 100644
--- a/tests/operators/test_gcs_to_s3.py
+++ b/tests/operators/test_gcs_to_s3.py
@@ -19,8 +19,8 @@
 import unittest
-from airflow.hooks.S3_hook import S3Hook
 from airflow.operators.gcs_to_s3 import GoogleCloudStorageToS3Operator
+from airflow.providers.aws.hooks.s3 import S3Hook
 from tests.compat import mock
 try:
diff --git a/tests/operators/test_s3_file_transform_operator.py b/tests/operators/test_s3_file_transform_operator.py
index f7dbed2f6a2bc2..c49d825b16bad0 100644
--- a/tests/operators/test_s3_file_transform_operator.py
+++ b/tests/operators/test_s3_file_transform_operator.py
@@ -112,7 +112,7 @@ def test_execute_with_failing_transform_script(self, mock_Popen):
         self.assertEqual('Transform script failed: 42', str(e.exception))
-    @mock.patch('airflow.hooks.S3_hook.S3Hook.select_key', return_value="input")
+    @mock.patch('airflow.providers.aws.hooks.s3.S3Hook.select_key', return_value="input")
     @mock_s3
     def test_execute_with_select_expression(self, mock_select_key):
         bucket = "bucket"
diff --git a/tests/operators/test_s3_to_hive_operator.py b/tests/operators/test_s3_to_hive_operator.py
index d1241981bc5600..bfac9c531b277e 100644
--- a/tests/operators/test_s3_to_hive_operator.py
+++ b/tests/operators/test_s3_to_hive_operator.py
@@ -274,7 +274,7 @@ def test_execute_with_select_expression(self, mock_hiveclihook):
         input_serialization['CSV']['FileHeaderInfo'] = 'USE'
         # Confirm that select_key was called with the right params
-        with mock.patch('airflow.hooks.S3_hook.S3Hook.select_key',
+        with mock.patch('airflow.providers.aws.hooks.s3.S3Hook.select_key',
                         return_value="") as mock_select_key:
             # Execute S3ToHiveTransfer
             s32hive = S3ToHiveTransfer(**self.kwargs)
diff --git a/tests/sensors/test_s3_key_sensor.py b/tests/sensors/test_s3_key_sensor.py
index 70ebfebf389a8d..213bc7bc0f5882 100644
--- a/tests/sensors/test_s3_key_sensor.py
+++ b/tests/sensors/test_s3_key_sensor.py
@@ -64,7 +64,7 @@ def test_parse_bucket_key(self, key, bucket, parsed_key, parsed_bucket):
         self.assertEqual(s.bucket_key, parsed_key)
         self.assertEqual(s.bucket_name, parsed_bucket)
-    @mock.patch('airflow.hooks.S3_hook.S3Hook')
+    @mock.patch('airflow.providers.aws.hooks.s3.S3Hook')
     def test_poke(self, mock_hook):
         s = S3KeySensor(
             task_id='s3_key_sensor',
@@ -78,7 +78,7 @@ def test_poke(self, mock_hook):
         mock_hook.return_value.check_for_key.return_value = True
         self.assertTrue(s.poke(None))
-    @mock.patch('airflow.hooks.S3_hook.S3Hook')
+    @mock.patch('airflow.providers.aws.hooks.s3.S3Hook')
     def test_poke_wildcard(self, mock_hook):
         s = S3KeySensor(
             task_id='s3_key_sensor',
diff --git a/tests/sensors/test_s3_prefix_sensor.py b/tests/sensors/test_s3_prefix_sensor.py
index f7a5bfa87d6172..c737f960364296 100644
--- a/tests/sensors/test_s3_prefix_sensor.py
+++ b/tests/sensors/test_s3_prefix_sensor.py
@@ -25,7 +25,7 @@ class TestS3PrefixSensor(unittest.TestCase):
-    @mock.patch('airflow.hooks.S3_hook.S3Hook')
+    @mock.patch('airflow.providers.aws.hooks.s3.S3Hook')
     def test_poke(self, mock_hook):
         s = S3PrefixSensor(
             task_id='s3_prefix',
diff --git a/tests/utils/log/test_s3_task_handler.py b/tests/utils/log/test_s3_task_handler.py
index 2557d796bdca42..44226793faa9d2 100644
--- a/tests/utils/log/test_s3_task_handler.py
+++ b/tests/utils/log/test_s3_task_handler.py
@@ -21,9 +21,9 @@
 import unittest
 from unittest import mock
-from airflow.hooks.S3_hook import S3Hook
 from airflow.models import DAG, TaskInstance
 from airflow.operators.dummy_operator import DummyOperator
+from airflow.providers.aws.hooks.s3 import S3Hook
 from airflow.utils.log.s3_task_handler import S3TaskHandler
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
@@ -81,7 +81,7 @@ def test_hook(self):
     def test_hook_raises(self):
         handler = self.s3_task_handler
         with mock.patch.object(handler.log, 'error') as mock_error:
-            with mock.patch("airflow.hooks.S3_hook.S3Hook") as mock_hook:
+            with mock.patch("airflow.providers.aws.hooks.s3.S3Hook") as mock_hook:
                 mock_hook.side_effect = Exception('Failed to connect')
                 # Initialize the hook
                 handler.hook
@@ -103,7 +103,7 @@ def test_log_exists_raises(self):
         self.assertFalse(self.s3_task_handler.s3_log_exists('s3://nonexistentbucket/foo'))
     def test_log_exists_no_hook(self):
-        with mock.patch("airflow.hooks.S3_hook.S3Hook") as mock_hook:
+        with mock.patch("airflow.providers.aws.hooks.s3.S3Hook") as mock_hook:
             mock_hook.side_effect = Exception('Failed to connect')
             self.assertFalse(self.s3_task_handler.s3_log_exists(self.remote_log_location))
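
The test hunks rewrite every mock.patch target as well, since patch() must name the module where S3Hook is now defined. A small hedged sketch of the resulting pattern, combining the optional-import fallback added in tests/hooks/test_s3_hook.py with a patched method (the test class and the bucket/key names are illustrative assumptions, not part of this patch):

    import unittest
    from unittest import mock

    # Fallback import, mirroring the try/except added in tests/hooks/test_s3_hook.py.
    try:
        from airflow.providers.aws.hooks.s3 import S3Hook
    except ImportError:  # provider package not available
        S3Hook = None  # type: ignore


    class TestS3HookNewPath(unittest.TestCase):
        @unittest.skipIf(S3Hook is None, "S3Hook could not be imported")
        @mock.patch('airflow.providers.aws.hooks.s3.S3Hook.check_for_key', return_value=True)
        def test_patch_target_uses_new_module_path(self, mock_check_for_key):
            # The patch target names the new module, exactly as in the sensor
            # and task-handler tests above, so this call hits the mock.
            hook = S3Hook(aws_conn_id='aws_default')
            self.assertTrue(hook.check_for_key('some/key', bucket_name='a-bucket'))
            mock_check_for_key.assert_called_once_with('some/key', bucket_name='a-bucket')


    if __name__ == '__main__':
        unittest.main()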