Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Using copy and upload responses in storage. #751

Merged
merged 2 commits into from
Mar 23, 2015
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 11 additions & 3 deletions gcloud/storage/blob.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
"""Create / interact with Google Cloud Storage blobs."""

import copy
import json
import mimetypes
import os
import time
Expand Down Expand Up @@ -353,10 +354,17 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
# pass them as None, because apitools wants to print to the console
# by default.
if upload.strategy == transfer._RESUMABLE_UPLOAD:
upload.StreamInChunks(callback=lambda *args: None,
finish_callback=lambda *args: None)
http_response = upload.StreamInChunks(
callback=lambda *args: None,
finish_callback=lambda *args: None)
else:
http_wrapper.MakeRequest(conn.http, request, retries=num_retries)
http_response = http_wrapper.MakeRequest(conn.http, request,
retries=num_retries)
response_content = http_response.content
if not isinstance(response_content,
six.string_types): # pragma: NO COVER Python3
response_content = response_content.decode('utf-8')
self._properties = json.loads(response_content)

def upload_from_filename(self, filename, content_type=None):
"""Upload this blob's contents from the content of a named file.
Expand Down
3 changes: 2 additions & 1 deletion gcloud/storage/bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,8 @@ def copy_blob(self, blob, destination_bucket, new_name=None):
new_name = blob.name
new_blob = Blob(bucket=destination_bucket, name=new_name)
api_path = blob.path + '/copyTo' + new_blob.path
self.connection.api_request(method='POST', path=api_path)
copy_result = self.connection.api_request(method='POST', path=api_path)
new_blob._properties = copy_result
return new_blob

def upload_file(self, filename, blob_name=None):
Expand Down
13 changes: 7 additions & 6 deletions gcloud/storage/test_blob.py
Original file line number Diff line number Diff line change
Expand Up @@ -348,7 +348,7 @@ def _upload_from_file_simple_test_helper(self, properties=None,
DATA = b'ABCDEF'
response = {'status': OK}
connection = _Connection(
(response, b''),
(response, b'{}'),
)
bucket = _Bucket(connection)
blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties)
Expand Down Expand Up @@ -412,10 +412,11 @@ def test_upload_from_file_resumable(self):
chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
'range': 'bytes 0-4'}
chunk2_response = {'status': OK}
# Need valid JSON on last response, since resumable.
connection = _Connection(
(loc_response, b''),
(chunk1_response, b''),
(chunk2_response, b''),
(chunk2_response, b'{}'),
)
bucket = _Bucket(connection)
blob = self._makeOne(BLOB_NAME, bucket=bucket)
Expand Down Expand Up @@ -470,7 +471,7 @@ def test_upload_from_file_w_slash_in_name(self):
'range': 'bytes 0-4'}
chunk2_response = {'status': OK}
connection = _Connection(
(loc_response, ''),
(loc_response, '{}'),
(chunk1_response, ''),
(chunk2_response, ''),
)
Expand Down Expand Up @@ -512,7 +513,7 @@ def _upload_from_filename_test_helper(self, properties=None,
'range': 'bytes 0-4'}
chunk2_response = {'status': OK}
connection = _Connection(
(loc_response, ''),
(loc_response, '{}'),
(chunk1_response, ''),
(chunk2_response, ''),
)
Expand Down Expand Up @@ -576,7 +577,7 @@ def test_upload_from_string_w_bytes(self):
'range': 'bytes 0-4'}
chunk2_response = {'status': OK}
connection = _Connection(
(loc_response, ''),
(loc_response, '{}'),
(chunk1_response, ''),
(chunk2_response, ''),
)
Expand Down Expand Up @@ -614,7 +615,7 @@ def test_upload_from_string_w_text(self):
'range': 'bytes 0-4'}
chunk2_response = {'status': OK}
connection = _Connection(
(loc_response, ''),
(loc_response, '{}'),
(chunk1_response, ''),
(chunk2_response, ''),
)
Expand Down
Binary file not shown.
13 changes: 4 additions & 9 deletions regression/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ class TestStorageFiles(unittest2.TestCase):
'path': 'regression/data/CloudPlatform_128px_Retina.png',
},
'big': {
'path': 'regression/data/five-mb-file.zip',
'path': 'regression/data/five-point-one-mb-file.zip',
},
'simple': {
'path': 'regression/data/simple.txt',
Expand Down Expand Up @@ -119,21 +119,19 @@ def test_large_file_write_from_stream(self):

file_data = self.FILES['big']
with open(file_data['path'], 'rb') as file_obj:
self.bucket.upload_file_object(file_obj, blob_name=blob.name)
blob.upload_from_file(file_obj)
self.case_blobs_to_delete.append(blob)

blob._reload_properties() # force a reload
self.assertEqual(blob.md5_hash, file_data['hash'])

def test_small_file_write_from_filename(self):
blob = storage.Blob(bucket=self.bucket, name='LargeFile')
blob = storage.Blob(bucket=self.bucket, name='SmallFile')
self.assertEqual(blob._properties, {})

file_data = self.FILES['simple']
blob.upload_from_filename(file_data['path'])
self.case_blobs_to_delete.append(blob)

blob._reload_properties() # force a reload
self.assertEqual(blob.md5_hash, file_data['hash'])

def test_write_metadata(self):
Expand All @@ -143,7 +141,6 @@ def test_write_metadata(self):
# NOTE: This should not be necessary. We should be able to pass
# it in to upload_file and also to upload_from_string.
blob.content_type = 'image/png'
blob._reload_properties() # force a reload
self.assertEqual(blob.content_type, 'image/png')

def test_direct_write_and_read_into_file(self):
Expand All @@ -153,7 +150,7 @@ def test_direct_write_and_read_into_file(self):
self.case_blobs_to_delete.append(blob)

same_blob = storage.Blob(bucket=self.bucket, name='MyBuffer')
same_blob._reload_properties() # force a reload
same_blob._reload_properties() # Initialize properties.
temp_filename = tempfile.mktemp()
with open(temp_filename, 'w') as file_obj:
same_blob.download_to_file(file_obj)
Expand All @@ -171,9 +168,7 @@ def test_copy_existing_file(self):
new_blob = self.bucket.copy_blob(blob, self.bucket, 'CloudLogoCopy')
self.case_blobs_to_delete.append(new_blob)

blob._reload_properties() # force a reload
base_contents = blob.download_as_string()
new_blob._reload_properties() # force a reload
copied_contents = new_blob.download_as_string()
self.assertEqual(base_contents, copied_contents)

Expand Down