Merge pull request #1717 from tseaver/1712-storage-upload_drops_40x_errors_on_floor

Raise appropriate exception when upload fails.
tseaver committed Apr 18, 2016
2 parents 9ad9a00 + d8729b7 commit 5d6c3c6
Showing 2 changed files with 82 additions and 3 deletions.
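
The branch name ("storage upload drops 40x errors on floor") and the description above indicate that a failed upload previously went unreported; with this change, upload_from_file raises the matching gcloud.exceptions subclass instead. A minimal caller-side sketch of the new behavior, assuming default credentials in the environment and using placeholder bucket, blob, and file names:

    # Sketch only: the names below are placeholders, not part of this commit.
    from gcloud import storage
    from gcloud.exceptions import GCloudError, NotFound

    client = storage.Client()
    bucket = client.bucket('example-bucket')        # placeholder bucket name
    blob = bucket.blob('uploads/report.csv')        # placeholder blob name

    try:
        with open('report.csv', 'rb') as file_obj:  # placeholder local file
            blob.upload_from_file(file_obj)
    except NotFound:
        # e.g. the target bucket does not exist (HTTP 404)
        raise
    except GCloudError:
        # any other non-2xx upload response now surfaces as a typed exception
        raise
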
18 changes: 17 additions & 1 deletion gcloud/storage/blob.py
@@ -21,12 +21,14 @@
import os
import time

import httplib2
import six
from six.moves.urllib.parse import quote

from gcloud._helpers import _rfc3339_to_datetime
from gcloud.credentials import generate_signed_url
from gcloud.exceptions import NotFound
from gcloud.exceptions import make_exception
from gcloud.storage._helpers import _PropertyMixin
from gcloud.storage._helpers import _scalar_property
from gcloud.storage.acl import ObjectACL
@@ -346,6 +348,16 @@ def download_as_string(self, client=None):
self.download_to_file(string_buffer, client=client)
return string_buffer.getvalue()

@staticmethod
def _check_response_error(request, http_response):
"""Helper for :meth:`upload_from_file`."""
info = http_response.info
status = int(info['status'])
if not 200 <= status < 300:
faux_response = httplib2.Response({'status': status})
raise make_exception(faux_response, http_response.content,
error_info=request.url)

def upload_from_file(self, file_obj, rewind=False, size=None,
content_type=None, num_retries=6, client=None):
"""Upload the contents of this blob from a file-like object.
@@ -390,7 +402,8 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
to the ``client`` stored on the blob's bucket.
:raises: :class:`ValueError` if size is not passed in and can not be
determined
determined; :class:`gcloud.exceptions.GCloudError` if the
upload response returns an error status.
"""
client = self._require_client(client)
# Use the private ``_connection`` rather than the public
@@ -452,7 +465,10 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
else:
http_response = make_api_request(connection.http, request,
retries=num_retries)

self._check_response_error(request, http_response)
response_content = http_response.content

if not isinstance(response_content,
six.string_types): # pragma: NO COVER Python3
response_content = response_content.decode('utf-8')
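
For reference, the new _check_response_error helper above defers to gcloud.exceptions.make_exception to turn a non-2xx upload status into the corresponding typed exception, which upload_from_file then raises. A rough, self-contained illustration of that mapping (not library source; the URL and empty JSON payload are placeholders):

    import httplib2

    from gcloud.exceptions import NotFound, make_exception

    # Build a faux httplib2 response the same way the helper does.
    faux_response = httplib2.Response({'status': 404})
    exc = make_exception(faux_response, b'{}',
                         error_info='http://example.com/b/name/o')
    assert isinstance(exc, NotFound)  # a 404 status maps to NotFound
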
67 changes: 65 additions & 2 deletions gcloud/storage/test_blob.py
@@ -418,15 +418,18 @@ def test_upload_from_file_size_failure(self):
def _upload_from_file_simple_test_helper(self, properties=None,
content_type_arg=None,
expected_content_type=None,
chunk_size=5):
chunk_size=5,
status=None):
from six.moves.http_client import OK
from six.moves.urllib.parse import parse_qsl
from six.moves.urllib.parse import urlsplit
from gcloud._testing import _NamedTemporaryFile

BLOB_NAME = 'blob-name'
DATA = b'ABCDEF'
response = {'status': OK}
if status is None:
status = OK
response = {'status': status}
connection = _Connection(
(response, b'{}'),
)
@@ -463,6 +466,12 @@ def test_upload_from_file_simple(self):
self._upload_from_file_simple_test_helper(
expected_content_type='application/octet-stream')

def test_upload_from_file_simple_not_found(self):
from six.moves.http_client import NOT_FOUND
from gcloud.exceptions import NotFound
with self.assertRaises(NotFound):
self._upload_from_file_simple_test_helper(status=NOT_FOUND)

def test_upload_from_file_simple_w_chunk_size_None(self):
self._upload_from_file_simple_test_helper(
expected_content_type='application/octet-stream',
@@ -572,6 +581,60 @@ def test_upload_from_file_resumable(self):
'redirections': 5,
})

def test_upload_from_file_resumable_w_error(self):
from six.moves.http_client import NOT_FOUND
from six.moves.urllib.parse import parse_qsl
from six.moves.urllib.parse import urlsplit
from gcloud._testing import _Monkey
from gcloud._testing import _NamedTemporaryFile
from gcloud.streaming import transfer
from gcloud.streaming.exceptions import HttpError

BLOB_NAME = 'blob-name'
DATA = b'ABCDEF'
loc_response = {'status': NOT_FOUND}
connection = _Connection(
(loc_response, b'{"error": "no such bucket"}'),
)
client = _Client(connection)
bucket = _Bucket(client)
blob = self._makeOne(BLOB_NAME, bucket=bucket)
blob._CHUNK_SIZE_MULTIPLE = 1
blob.chunk_size = 5

# Set the threshold low enough that we force a resumable upload.
with _Monkey(transfer, RESUMABLE_UPLOAD_THRESHOLD=5):
with _NamedTemporaryFile() as temp:
with open(temp.name, 'wb') as file_obj:
file_obj.write(DATA)
with open(temp.name, 'rb') as file_obj:
with self.assertRaises(HttpError):
blob.upload_from_file(file_obj, rewind=True)

rq = connection.http._requested
self.assertEqual(len(rq), 1)

# Requested[0]
headers = dict(
[(x.title(), str(y)) for x, y in rq[0].pop('headers').items()])
self.assertEqual(headers['X-Upload-Content-Length'], '6')
self.assertEqual(headers['X-Upload-Content-Type'],
'application/octet-stream')

uri = rq[0].pop('uri')
scheme, netloc, path, qs, _ = urlsplit(uri)
self.assertEqual(scheme, 'http')
self.assertEqual(netloc, 'example.com')
self.assertEqual(path, '/b/name/o')
self.assertEqual(dict(parse_qsl(qs)),
{'uploadType': 'resumable', 'name': BLOB_NAME})
self.assertEqual(rq[0], {
'method': 'POST',
'body': '',
'connection_type': None,
'redirections': 5,
})

def test_upload_from_file_w_slash_in_name(self):
from six.moves.http_client import OK
from six.moves.urllib.parse import parse_qsl
