Commit

[AIRFLOW-4438] Add Gzip compression to S3_hook (#7680)
OmairK authored and kaxil committed Mar 19, 2020
1 parent bdc461b commit 8ca782a
Showing 2 changed files with 37 additions and 4 deletions.
25 changes: 21 additions & 4 deletions airflow/hooks/S3_hook.py
@@ -16,15 +16,23 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# pylint: disable=invalid-name
"""
Interact with AWS S3, using the boto3 library.
"""
import fnmatch
import gzip as gz
import re
import shutil
from urllib.parse import urlparse

from botocore.exceptions import ClientError

from airflow.exceptions import AirflowException
from airflow.contrib.hooks.aws_hook import AwsHook

from six import BytesIO
from urllib.parse import urlparse
import re
import fnmatch


class S3Hook(AwsHook):
@@ -318,7 +326,8 @@ def load_file(self,
key,
bucket_name=None,
replace=False,
encrypt=False):
encrypt=False,
gzip=False):
"""
Loads a local file to S3
@@ -335,6 +344,8 @@ def load_file(self,
:param encrypt: If True, the file will be encrypted on the server-side
by S3 and will be stored in an encrypted form while at rest in S3.
:type encrypt: bool
:param gzip: If True, the file will be compressed locally
:type gzip: bool
"""
if not bucket_name:
(bucket_name, key) = self.parse_s3_url(key)
@@ -345,6 +356,12 @@ def load_file(self,
extra_args = {}
if encrypt:
extra_args['ServerSideEncryption'] = "AES256"
if gzip:
filename_gz = filename.name + '.gz'
with open(filename.name, 'rb') as f_in:
with gz.open(filename_gz, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
filename = filename_gz

client = self.get_conn()
client.upload_file(filename, bucket_name, key, ExtraArgs=extra_args)
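
A minimal usage sketch of the new flag (not part of the commit). It passes an open file object, mirroring the new test below, since the gzip branch above reads the local path from filename.name; the connection id, local path, key, and bucket name are hypothetical.

# Usage sketch, assuming an Airflow 1.10-style install where S3Hook is importable
# from airflow.hooks.S3_hook and an 'aws_default' connection is configured.
from airflow.hooks.S3_hook import S3Hook

hook = S3Hook(aws_conn_id='aws_default')

# The gzip branch reads the path from filename.name, so an open file object
# (which exposes .name) is used here, as in the new test.
with open('/tmp/report.csv', 'rb') as source:      # hypothetical local file
    hook.load_file(source,
                   key='reports/report.csv.gz',    # hypothetical S3 key
                   bucket_name='my-data-bucket',   # hypothetical bucket
                   replace=True,
                   gzip=True)                      # flag added by this commit
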
16 changes: 16 additions & 0 deletions tests/hooks/test_s3_hook.py
@@ -18,6 +18,8 @@
# under the License.
#

import gzip as gz

import mock
import tempfile
import unittest
@@ -297,6 +299,20 @@ def test_load_fileobj(self):

self.assertEqual(body, b'Content')

@mock_s3
def test_load_file_gzip(self):
hook = S3Hook()
conn = hook.get_conn()
# We need to create the bucket since this is all in Moto's 'virtual'
# AWS account
conn.create_bucket(Bucket="mybucket")
with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(b"Content")
temp_file.seek(0)
hook.load_file(temp_file, "my_key", 'mybucket', gzip=True)
resource = boto3.resource('s3').Object('mybucket', 'my_key') # pylint: disable=no-member
assert gz.decompress(resource.get()['Body'].read()) == b'Content'


if __name__ == '__main__':
unittest.main()
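
For reference, a short sketch of reading the uploaded object back and decompressing it, mirroring the assertion in test_load_file_gzip; the bucket and key names are the hypothetical ones from the sketch above, and AWS credentials are assumed to be configured.

import gzip as gz

import boto3

# Fetch the hypothetical object and gunzip it back to the original bytes.
obj = boto3.resource('s3').Object('my-data-bucket', 'reports/report.csv.gz')
compressed = obj.get()['Body'].read()
original = gz.decompress(compressed)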
