Make storage upload have no chunk size by default.
Fixes #546.
dhermes committed Feb 10, 2015
1 parent b395360 commit afc9994
Showing 2 changed files with 27 additions and 9 deletions.
23 changes: 19 additions & 4 deletions gcloud/storage/blob.py
@@ -271,7 +271,8 @@ def download_as_string(self):
         return string_buffer.getvalue()

     def upload_from_file(self, file_obj, rewind=False, size=None,
-                         content_type=None, num_retries=6):
+                         content_type=None, num_retries=6,
+                         upload_chunk_size=None):
         """Upload the contents of this blob from a file-like object.
         .. note::
@@ -285,6 +286,11 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
            `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
            API documents for details.
+        The current ``Blob``'s chunk size is not used by default. Default
+        behavior is instead to attempt uploading the entire object. See
+        https://github.com/GoogleCloudPlatform/gcloud-python/issues/546
+        for more details.
         :type file_obj: file
         :param file_obj: A file handle open for reading.
@@ -296,6 +302,15 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
         :param size: The number of bytes to read from the file handle.
                      If not provided, we'll try to guess the size using
                      :func:`os.fstat`
+        :type content_type: string or ``NoneType``
+        :param content_type: Optional content type of uploaded content.
+        :type num_retries: int
+        :param num_retries: Optional number of retries. Defaults to 6.
+        :type upload_chunk_size: int or ``NoneType``
+        :param upload_chunk_size: Optional size of chunks to upload with.
         """
         # Rewind the file if desired.
         if rewind:
@@ -313,7 +328,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
         upload = transfer.Upload(file_obj,
                                  content_type or 'application/unknown',
                                  total_bytes, auto_transfer=False,
-                                 chunksize=self.CHUNK_SIZE)
+                                 chunksize=upload_chunk_size)

         url_builder = _UrlBuilder(bucket_name=self.bucket.name,
                                   object_name=self.name)
@@ -611,7 +626,7 @@ def updated(self):


 class _UploadConfig(object):
-    """ Faux message FBO apitools' 'ConfigureRequest'.
+    """Faux message for benefit of apitools' 'ConfigureRequest'.
     Values extracted from apitools
     'samples/storage_sample/storage/storage_v1_client.py'
@@ -625,7 +640,7 @@ class _UploadConfig(object):


 class _UrlBuilder(object):
-    """Faux builder FBO apitools' 'ConfigureRequest'"""
+    """Faux builder for benefit of apitools' 'ConfigureRequest'"""
     def __init__(self, bucket_name, object_name):
         self.query_params = {'name': object_name}
         self._bucket_name = bucket_name
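The new keyword defaults to ``None``, so callers get whole-object uploads unless they opt back in to chunking. A minimal sketch of the calling pattern this enables (illustrative only, not taken from this commit; ``bucket`` is assumed to be an already-configured gcloud.storage bucket, and the object and file names are placeholders):

# Illustrative sketch -- assumes `bucket` is an existing gcloud.storage bucket;
# the object and file names below are placeholders.
from gcloud.storage.blob import Blob

blob = Blob('my-object', bucket=bucket)

with open('local-file.bin', 'rb') as file_obj:
    # New default: upload_chunk_size is None, so no chunk size is set on the
    # apitools Upload and the upload attempts to send the entire object.
    blob.upload_from_file(file_obj)

with open('local-file.bin', 'rb') as file_obj:
    # Chunked upload is still available by passing an explicit size.
    blob.upload_from_file(file_obj, upload_chunk_size=1024 * 1024)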
13 changes: 8 additions & 5 deletions gcloud/storage/test_blob.py
@@ -382,13 +382,14 @@ def test_upload_from_file_resumable(self):
         )
         bucket = _Bucket(connection)
         blob = self._makeOne(BLOB_NAME, bucket=bucket)
-        blob.CHUNK_SIZE = 5
+        UPLOAD_CHUNK_SIZE = 5
         # Set the threshold low enough that we force a resumable upload.
         with _Monkey(transfer, _RESUMABLE_UPLOAD_THRESHOLD=5):
             with NamedTemporaryFile() as fh:
                 fh.write(DATA)
                 fh.flush()
-                blob.upload_from_file(fh, rewind=True)
+                blob.upload_from_file(fh, rewind=True,
+                                      upload_chunk_size=UPLOAD_CHUNK_SIZE)
         rq = connection.http._requested
         self.assertEqual(len(rq), 3)
         self.assertEqual(rq[0]['method'], 'POST')
@@ -408,16 +409,18 @@ def test_upload_from_file_resumable(self):
         self.assertEqual(rq[1]['uri'], UPLOAD_URL)
         headers = dict(
             [(x.title(), str(y)) for x, y in rq[1]['headers'].items()])
-        self.assertEqual(rq[1]['body'], DATA[:5])
+        self.assertEqual(rq[1]['body'], DATA[:UPLOAD_CHUNK_SIZE])
         headers = dict(
             [(x.title(), str(y)) for x, y in rq[1]['headers'].items()])
         self.assertEqual(headers['Content-Range'], 'bytes 0-4/6')
         self.assertEqual(rq[2]['method'], 'PUT')
         self.assertEqual(rq[2]['uri'], UPLOAD_URL)
-        self.assertEqual(rq[2]['body'], DATA[5:])
+        self.assertEqual(rq[2]['body'], DATA[UPLOAD_CHUNK_SIZE:])
         headers = dict(
             [(x.title(), str(y)) for x, y in rq[2]['headers'].items()])
-        self.assertEqual(headers['Content-Range'], 'bytes 5-5/6')
+        content_range = 'bytes %d-%d/%d' % (
+            UPLOAD_CHUNK_SIZE, UPLOAD_CHUNK_SIZE, len(DATA))
+        self.assertEqual(headers['Content-Range'], content_range)

     def test_upload_from_file_w_slash_in_name(self):
         from six.moves.http_client import OK
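For reference, the Content-Range values asserted in the resumable test follow directly from the chunk size and the payload length; a small sketch of that arithmetic (the six-byte DATA value below is a stand-in, only its length matters):

# Stand-in payload: the test's DATA is six bytes long, matching the asserted
# ranges 'bytes 0-4/6' and 'bytes 5-5/6'.
DATA = b'abcdef'
UPLOAD_CHUNK_SIZE = 5

total = len(DATA)
start = 0
while start < total:
    end = min(start + UPLOAD_CHUNK_SIZE, total) - 1
    # First pass: 'bytes 0-4/6'; second pass: 'bytes 5-5/6'.
    print('bytes %d-%d/%d' % (start, end, total))
    start = end + 1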
