Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Removing HTTP request in _PropertyMixin.properties. #688

Merged
merged 1 commit into from
Mar 13, 2015
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 2 additions & 36 deletions gcloud/storage/_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,19 +25,10 @@ class _PropertyMixin(object):
"""Abstract mixin for cloud storage classes with associated properties.

Non-abstract subclasses should implement:
- CUSTOM_PROPERTY_ACCESSORS
- connection
- path
"""

CUSTOM_PROPERTY_ACCESSORS = None
"""Mapping of field name -> accessor for fields w/ custom accessors.

Expected to be set by subclasses. Fields in this mapping will cause
:meth:`_get_property()` to raise a KeyError with a message to use the
relevant accessor methods.
"""

@property
def connection(self):
"""Abstract getter for the connection to use."""
Expand All @@ -64,12 +55,11 @@ def __init__(self, name=None, properties=None):

@property
def properties(self):
"""Ensure properties are loaded, and return a copy.
"""Return a copy of properties.

:rtype: dict
:returns: Copy of properties.
"""
if not self._properties:
self._reload_properties()
return self._properties.copy()

@property
Expand Down Expand Up @@ -131,30 +121,6 @@ def _patch_properties(self, properties):
query_params={'projection': 'full'})
return self

def _get_property(self, field, default=None):
"""Return the value of a field from the server-side representation.

If you request a field that isn't available, and that field can
be retrieved by refreshing data from Cloud Storage, this method
will reload the data using :func:`_PropertyMixin._reload_properties`.

:type field: string
:param field: A particular field to retrieve from properties.

:type default: anything
:param default: The value to return if the field provided wasn't found.

:rtype: anything
:returns: value of the specific field, or the default if not found.
"""
# Raise for fields which have custom accessors.
custom = self.CUSTOM_PROPERTY_ACCESSORS.get(field)
if custom is not None:
message = "Use '%s' or related methods instead." % custom
raise KeyError((field, message))

return self.properties.get(field, default)


class _PropertyBatch(object):
"""Context manager: Batch updates to object's ``_patch_properties``
Expand Down
23 changes: 0 additions & 23 deletions gcloud/storage/blob.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,29 +52,6 @@ class Blob(_PropertyMixin):
:param properties: All the other data provided by Cloud Storage.
"""

CUSTOM_PROPERTY_ACCESSORS = {
'acl': 'acl',
'cacheControl': 'cache_control',
'contentDisposition': 'content_disposition',
'contentEncoding': 'content_encoding',
'contentLanguage': 'content_language',
'contentType': 'content_type',
'componentCount': 'component_count',
'etag': 'etag',
'generation': 'generation',
'id': 'id',
'mediaLink': 'media_link',
'metageneration': 'metageneration',
'name': 'name',
'owner': 'owner',
'selfLink': 'self_link',
'size': 'size',
'storageClass': 'storage_class',
'timeDeleted': 'time_deleted',
'updated': 'updated',
}
"""Map field name -> accessor for fields w/ custom accessors."""

CHUNK_SIZE = 1024 * 1024 # 1 MB.
"""The size of a chunk of data whenever iterating (1 MB).

Expand Down
21 changes: 1 addition & 20 deletions gcloud/storage/bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,26 +86,6 @@ class Bucket(_PropertyMixin):
_MAX_OBJECTS_FOR_BUCKET_DELETE = 256
"""Maximum number of existing objects allowed in Bucket.delete()."""

CUSTOM_PROPERTY_ACCESSORS = {
'acl': 'acl',
'cors': 'get_cors()',
'defaultObjectAcl': 'get_default_object_acl()',
'etag': 'etag',
'id': 'id',
'lifecycle': 'get_lifecycle()',
'location': 'location',
'logging': 'get_logging()',
'metageneration': 'metageneration',
'name': 'name',
'owner': 'owner',
'projectNumber': 'project_number',
'selfLink': 'self_link',
'storageClass': 'storage_class',
'timeCreated': 'time_created',
'versioning': 'versioning_enabled',
}
"""Map field name -> accessor for fields w/ custom accessors."""

# ACL rules are lazily retrieved.
_acl = _default_object_acl = None

Expand Down Expand Up @@ -590,6 +570,7 @@ def get_logging(self):
:returns: a dict w/ keys, ``logBucket`` and ``logObjectPrefix``
(if logging is enabled), or None (if not).
"""
self._reload_properties()
info = self.properties.get('logging')
if info is not None:
return info.copy()
Expand Down
50 changes: 6 additions & 44 deletions gcloud/storage/test__helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,9 @@ def _getTargetClass(self):
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)

def _derivedClass(self, connection=None, path=None, **custom_fields):
def _derivedClass(self, connection=None, path=None):

class Derived(self._getTargetClass()):
CUSTOM_PROPERTY_ACCESSORS = custom_fields

@property
def connection(self):
Expand All @@ -39,18 +38,14 @@ def path(self):

return Derived

def test_connetction_is_abstract(self):
def test_connection_is_abstract(self):
mixin = self._makeOne()
self.assertRaises(NotImplementedError, lambda: mixin.connection)

def test_path_is_abstract(self):
mixin = self._makeOne()
self.assertRaises(NotImplementedError, lambda: mixin.path)

def test_properties_eager(self):
derived = self._derivedClass()(properties={'extant': False})
self.assertEqual(derived.properties, {'extant': False})

def test_batch(self):
connection = _Connection({'foo': 'Qux', 'bar': 'Baz'})
derived = self._derivedClass(connection, '/path')()
Expand All @@ -65,9 +60,11 @@ def test_batch(self):
self.assertEqual(kw[0]['data'], {'foo': 'Qux', 'bar': 'Baz'})
self.assertEqual(kw[0]['query_params'], {'projection': 'full'})

def test_properties_lazy(self):
def test_properties_no_fetch(self):
connection = _Connection({'foo': 'Foo'})
derived = self._derivedClass(connection, '/path')()
self.assertEqual(derived.properties, {})
derived._reload_properties()
self.assertEqual(derived.properties, {'foo': 'Foo'})
kw = connection._requested
self.assertEqual(len(kw), 1)
Expand All @@ -86,40 +83,6 @@ def test__reload_properties(self):
self.assertEqual(kw[0]['path'], '/path')
self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})

def test__get_property_eager_hit(self):
derived = self._derivedClass()(properties={'foo': 'Foo'})
self.assertEqual(derived._get_property('foo'), 'Foo')

def test__get_property_eager_miss_w_default(self):
connection = _Connection({'foo': 'Foo'})
derived = self._derivedClass(connection, '/path')()
default = object()
self.assertTrue(derived._get_property('nonesuch', default) is default)
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]['method'], 'GET')
self.assertEqual(kw[0]['path'], '/path')
self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})

def test__get_property_lazy_hit(self):
connection = _Connection({'foo': 'Foo'})
derived = self._derivedClass(connection, '/path')()
self.assertTrue(derived._get_property('nonesuch') is None)
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]['method'], 'GET')
self.assertEqual(kw[0]['path'], '/path')
self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})

def test__get_property_w_custom_field(self):
derived = self._derivedClass(foo='get_foo')()
try:
derived._get_property('foo')
except KeyError as e:
self.assertTrue('get_foo' in str(e))
else: # pragma: NO COVER
self.assert_('KeyError not raised')

def test__patch_properties(self):
connection = _Connection({'foo': 'Foo'})
derived = self._derivedClass(connection, '/path')()
Expand All @@ -141,11 +104,10 @@ def _getTargetClass(self):
def _makeOne(self, wrapped):
return self._getTargetClass()(wrapped)

def _makeWrapped(self, connection=None, path=None, **custom_fields):
def _makeWrapped(self, connection=None, path=None):
from gcloud.storage._helpers import _PropertyMixin

class Wrapped(_PropertyMixin):
CUSTOM_PROPERTY_ACCESSORS = custom_fields

@property
def connection(self):
Expand Down
95 changes: 59 additions & 36 deletions gcloud/storage/test_bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -617,6 +617,7 @@ def test_get_cors_lazy(self):
after = {'cors': [CORS_ENTRY]}
connection = _Connection(after)
bucket = self._makeOne(connection, NAME)
bucket._reload_properties()
entries = bucket.get_cors()
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0]['maxAgeSeconds'],
Expand Down Expand Up @@ -709,6 +710,7 @@ def test_get_lifecycle_lazy(self):
after = {'lifecycle': {'rule': [LC_RULE]}}
connection = _Connection(after)
bucket = self._makeOne(connection, NAME)
bucket._reload_properties()
entries = bucket.get_lifecycle()
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0]['action']['type'], 'Delete')
Expand Down Expand Up @@ -760,30 +762,22 @@ def test_location_setter(self):
self.assertEqual(kw[0]['data'], {'location': 'AS'})
self.assertEqual(kw[0]['query_params'], {'projection': 'full'})

def test_get_logging_eager_w_prefix(self):
def test_get_logging_w_prefix(self):
NAME = 'name'
LOG_BUCKET = 'logs'
LOG_PREFIX = 'pfx'
before = {
'logging': {'logBucket': LOG_BUCKET,
'logObjectPrefix': LOG_PREFIX}}
connection = _Connection()
bucket = self._makeOne(connection, NAME, before)
info = bucket.get_logging()
self.assertEqual(info['logBucket'], LOG_BUCKET)
self.assertEqual(info['logObjectPrefix'], LOG_PREFIX)
kw = connection._requested
self.assertEqual(len(kw), 0)

def test_get_logging_lazy_wo_prefix(self):
NAME = 'name'
LOG_BUCKET = 'logs'
after = {'logging': {'logBucket': LOG_BUCKET}}
connection = _Connection(after)
'logging': {
'logBucket': LOG_BUCKET,
'logObjectPrefix': LOG_PREFIX,
},
}
resp_to_reload = before
connection = _Connection(resp_to_reload)
bucket = self._makeOne(connection, NAME)
info = bucket.get_logging()
self.assertEqual(info['logBucket'], LOG_BUCKET)
self.assertEqual(info.get('logObjectPrefix'), None)
self.assertEqual(info['logObjectPrefix'], LOG_PREFIX)
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]['method'], 'GET')
Expand All @@ -794,57 +788,85 @@ def test_enable_logging_defaults(self):
NAME = 'name'
LOG_BUCKET = 'logs'
before = {'logging': None}
after = {'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': ''}}
connection = _Connection(after)
resp_to_reload = before
resp_to_enable_logging = {
'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': ''},
}
connection = _Connection(resp_to_reload, resp_to_enable_logging,
resp_to_enable_logging)
bucket = self._makeOne(connection, NAME, before)
self.assertTrue(bucket.get_logging() is None)
bucket.enable_logging(LOG_BUCKET)
info = bucket.get_logging()
self.assertEqual(info['logBucket'], LOG_BUCKET)
self.assertEqual(info['logObjectPrefix'], '')
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]['method'], 'PATCH')
self.assertEqual(len(kw), 3)
self.assertEqual(kw[0]['method'], 'GET')
self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
self.assertEqual(kw[0]['data'], after)
self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
self.assertEqual(kw[1]['method'], 'PATCH')
self.assertEqual(kw[1]['path'], '/b/%s' % NAME)
self.assertEqual(kw[1]['data'], resp_to_enable_logging)
self.assertEqual(kw[1]['query_params'], {'projection': 'full'})
self.assertEqual(kw[2]['method'], 'GET')
self.assertEqual(kw[2]['path'], '/b/%s' % NAME)
self.assertEqual(kw[2]['query_params'], {'projection': 'noAcl'})

def test_enable_logging_explicit(self):
NAME = 'name'
LOG_BUCKET = 'logs'
LOG_PFX = 'pfx'
before = {'logging': None}
after = {
'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': LOG_PFX}}
connection = _Connection(after)
resp_to_reload = before
resp_to_enable_logging = {
'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': LOG_PFX},
}
connection = _Connection(resp_to_reload,
resp_to_enable_logging,
resp_to_enable_logging)
bucket = self._makeOne(connection, NAME, before)
self.assertTrue(bucket.get_logging() is None)
bucket.enable_logging(LOG_BUCKET, LOG_PFX)
info = bucket.get_logging()
self.assertEqual(info['logBucket'], LOG_BUCKET)
self.assertEqual(info['logObjectPrefix'], LOG_PFX)
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]['method'], 'PATCH')
self.assertEqual(len(kw), 3)
self.assertEqual(kw[0]['method'], 'GET')
self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
self.assertEqual(kw[0]['data'], after)
self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
self.assertEqual(kw[1]['method'], 'PATCH')
self.assertEqual(kw[1]['path'], '/b/%s' % NAME)
self.assertEqual(kw[1]['data'], resp_to_enable_logging)
self.assertEqual(kw[1]['query_params'], {'projection': 'full'})
self.assertEqual(kw[2]['method'], 'GET')
self.assertEqual(kw[2]['path'], '/b/%s' % NAME)
self.assertEqual(kw[2]['query_params'], {'projection': 'noAcl'})

def test_disable_logging(self):
NAME = 'name'
before = {'logging': {'logBucket': 'logs', 'logObjectPrefix': 'pfx'}}
after = {'logging': None}
connection = _Connection(after)
resp_to_reload = before
resp_to_disable_logging = {'logging': None}
connection = _Connection(resp_to_reload, resp_to_disable_logging,
resp_to_disable_logging)
bucket = self._makeOne(connection, NAME, before)
self.assertTrue(bucket.get_logging() is not None)
bucket.disable_logging()
self.assertTrue(bucket.get_logging() is None)
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]['method'], 'PATCH')
self.assertEqual(len(kw), 3)
self.assertEqual(kw[0]['method'], 'GET')
self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
self.assertEqual(kw[0]['data'], {'logging': None})
self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
self.assertEqual(kw[1]['method'], 'PATCH')
self.assertEqual(kw[1]['path'], '/b/%s' % NAME)
self.assertEqual(kw[1]['data'], {'logging': None})
self.assertEqual(kw[1]['query_params'], {'projection': 'full'})
self.assertEqual(kw[2]['method'], 'GET')
self.assertEqual(kw[2]['path'], '/b/%s' % NAME)
self.assertEqual(kw[2]['query_params'], {'projection': 'noAcl'})

def test_metageneration(self):
METAGENERATION = 42
Expand Down Expand Up @@ -888,6 +910,7 @@ def test_versioning_enabled_getter_missing(self):
NAME = 'name'
connection = _Connection({})
bucket = self._makeOne(connection, NAME)
bucket._reload_properties()
self.assertEqual(bucket.versioning_enabled, False)
kw = connection._requested
self.assertEqual(len(kw), 1)
Expand Down
Loading