diff --git a/gcloud/connection.py b/gcloud/connection.py index 35855e89b445..c8911f3c6384 100644 --- a/gcloud/connection.py +++ b/gcloud/connection.py @@ -41,4 +41,3 @@ def http(self): if self._credentials: self._http = self._credentials.authorize(self._http) return self._http - diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index 45b8f39e3af5..0de26dddcb2e 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -69,6 +69,7 @@ def get_connection(client_email, private_key_path): client_email, private_key_path, scope=SCOPE) return Connection(credentials=credentials) + def get_dataset(dataset_id, client_email, private_key_path): """Shortcut method to establish a connection to a particular dataset in the Cloud Datastore. diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index a009045e591e..4574ba836ee9 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -49,7 +49,7 @@ def _request(self, dataset_id, method, data): headers = { 'Content-Type': 'application/x-protobuf', 'Content-Length': str(len(data)), - } + } headers, content = self.http.request( uri=self.build_api_url(dataset_id=dataset_id, method=method), method='POST', headers=headers, body=data) @@ -132,7 +132,8 @@ def begin_transaction(self, dataset_id, serializable=False): request = datastore_pb.BeginTransactionRequest() if serializable: - request.isolation_level = datastore_pb.BeginTransactionRequest.SERIALIZABLE + request.isolation_level = ( + datastore_pb.BeginTransactionRequest.SERIALIZABLE) else: request.isolation_level = datastore_pb.BeginTransactionRequest.SNAPSHOT @@ -202,7 +203,8 @@ def run_query(self, dataset_id, query_pb, namespace=None): request.partition_id.namespace = namespace request.query.CopyFrom(query_pb) - response = self._rpc(dataset_id, 'runQuery', request, datastore_pb.RunQueryResponse) + response = self._rpc(dataset_id, 'runQuery', request, + datastore_pb.RunQueryResponse) return [e.entity for e in response.batch.entity_result] def lookup(self, dataset_id, key_pbs): diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py index b0dcc65d711e..556a82a8f08e 100644 --- a/gcloud/datastore/dataset.py +++ b/gcloud/datastore/dataset.py @@ -89,8 +89,10 @@ def get_entities(self, keys): # This import is here to avoid circular references. 
from gcloud.datastore.entity import Entity - entity_pbs = self.connection().lookup(dataset_id=self.id(), - key_pbs=[k.to_protobuf() for k in keys]) + entity_pbs = self.connection().lookup( + dataset_id=self.id(), + key_pbs=[k.to_protobuf() for k in keys] + ) entities = [] for entity_pb in entity_pbs: diff --git a/gcloud/datastore/demo/__init__.py b/gcloud/datastore/demo/__init__.py index e717f978a88b..9de811485b0c 100644 --- a/gcloud/datastore/demo/__init__.py +++ b/gcloud/datastore/demo/__init__.py @@ -5,10 +5,11 @@ __all__ = ['get_dataset', 'CLIENT_EMAIL', 'DATASET_ID', 'PRIVATE_KEY_PATH'] -CLIENT_EMAIL = '754762820716-gimou6egs2hq1rli7el2t621a1b04t9i@developer.gserviceaccount.com' +CLIENT_EMAIL = ('754762820716-gimou6egs2hq1rli7el2t621a1b04t9i' + '@developer.gserviceaccount.com') DATASET_ID = 'gcloud-datastore-demo' PRIVATE_KEY_PATH = os.path.join(os.path.dirname(__file__), 'demo.key') -def get_dataset(): #pragma NO COVER +def get_dataset(): # pragma NO COVER return datastore.get_dataset(DATASET_ID, CLIENT_EMAIL, PRIVATE_KEY_PATH) diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 28045f8d2157..5ffdb6122f22 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -43,19 +43,22 @@ class Entity(dict): >>> dataset.entity('MyEntityKind') - - :func:`gcloud.datastore.dataset.Dataset.get_entity` to retrive an existing entity. + - :func:`gcloud.datastore.dataset.Dataset.get_entity` + to retrieve an existing entity. >>> dataset.get_entity(key) - You can the set values on the entity just like you would on any other dictionary. + You can the set values on the entity + just like you would on any other dictionary. >>> entity['age'] = 20 >>> entity['name'] = 'JJ' >>> entity - And you can cast an entity to a regular Python dictionary with the `dict` builtin: + And you can cast an entity to a regular Python dictionary + with the `dict` builtin: >>> dict(entity) {'age': 20, 'name': 'JJ'} @@ -68,7 +71,7 @@ def __init__(self, dataset=None, kind=None): self._key = None def dataset(self): - """Get the :class:`gcloud.datastore.dataset.Dataset` in which this entity belonds. + """Get the :class:`gcloud.datastore.dataset.Dataset` in which this entity belongs. .. note:: This is based on the :class:`gcloud.datastore.key.Key` set on the entity. @@ -121,7 +124,8 @@ def from_key(cls, key): :type key: :class:`gcloud.datastore.key.Key` :param key: The key for the entity. - :returns: The :class:`Entity` derived from the :class:`gcloud.datastore.key.Key`. + :returns: The :class:`Entity` derived from the + :class:`gcloud.datastore.key.Key`. """ return cls().key(key) @@ -135,7 +139,8 @@ def from_protobuf(cls, pb, dataset=None): :type key: :class:`gcloud.datastore.datastore_v1_pb2.Entity` :param key: The Protobuf representing the entity. - :returns: The :class:`Entity` derived from the :class:`gcloud.datastore.datastore_v1_pb2.Entity`. + :returns: The :class:`Entity` derived from the + :class:`gcloud.datastore.datastore_v1_pb2.Entity`. """ # This is here to avoid circular imports. @@ -205,9 +210,10 @@ def delete(self): self.dataset().connection().delete_entity( dataset_id=self.dataset().id(), key_pb=self.key().to_protobuf()) - def __repr__(self): #pragma NO COVER + def __repr__(self): # pragma NO COVER # An entity should have a key all the time (even if it's partial). 
if self.key(): - return '' % (self.key().path(), super(Entity, self).__repr__()) + return '' % (self.key().path(), + super(Entity, self).__repr__()) else: return '' % (super(Entity, self).__repr__()) diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 4edc07b38828..fc01acd17a66 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -21,7 +21,7 @@ def __init__(self, dataset=None, namespace=None, path=None): :type path: sequence of dicts :param path: Each dict must have keys 'kind' (a string) and optionally - 'name' (a string) or 'id' (an integer). + 'name' (a string) or 'id' (an integer). """ self._dataset = dataset self._namespace = namespace @@ -267,7 +267,7 @@ def id_or_name(self): """ return self.id() or self.name() - def parent(self):#pragma NO COVER + def parent(self): # pragma NO COVER """Getter: return a new key for the next highest element in path. :rtype: :class:`gcloud.datastore.key.Key` @@ -278,5 +278,5 @@ def parent(self):#pragma NO COVER return None return self.path(self.path()[:-1]) - def __repr__(self): #pragma NO COVER + def __repr__(self): # pragma NO COVER return '' % self.path() diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 821ee4ba9533..4b67333b3cbe 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -47,7 +47,7 @@ class Query(object): '>': datastore_pb.PropertyFilter.GREATER_THAN, '>=': datastore_pb.PropertyFilter.GREATER_THAN_OR_EQUAL, '=': datastore_pb.PropertyFilter.EQUAL, - } + } """Mapping of operator strings and their protobuf equivalents.""" def __init__(self, kind=None, dataset=None): diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index 026c939ce9c2..cb36622dc42d 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -1,5 +1,6 @@ import unittest2 + class Test_get_connection(unittest2.TestCase): def _callFUT(self, client_email, private_key_path): @@ -13,6 +14,7 @@ def test_it(self): from gcloud.datastore.connection import Connection from gcloud.test_credentials import _Client from gcloud.test_credentials import _Monkey + CLIENT_EMAIL = 'phred@example.com' PRIVATE_KEY = 'SEEkR1t' client = _Client() @@ -27,7 +29,8 @@ def test_it(self): {'service_account_name': CLIENT_EMAIL, 'private_key': PRIVATE_KEY, 'scope': SCOPE, - }) + }) + class Test_get_dataset(unittest2.TestCase): @@ -43,6 +46,7 @@ def test_it(self): from gcloud.datastore.dataset import Dataset from gcloud.test_credentials import _Client from gcloud.test_credentials import _Monkey + CLIENT_EMAIL = 'phred@example.com' PRIVATE_KEY = 'SEEkR1t' DATASET_ID = 'DATASET' @@ -59,4 +63,4 @@ def test_it(self): {'service_account_name': CLIENT_EMAIL, 'private_key': PRIVATE_KEY, 'scope': SCOPE, - }) + }) diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py index ee151c261d0b..51bd41e7f63c 100644 --- a/gcloud/datastore/test_connection.py +++ b/gcloud/datastore/test_connection.py @@ -1,9 +1,11 @@ import unittest2 + class TestConnection(unittest2.TestCase): def _getTargetClass(self): from gcloud.datastore.connection import Connection + return Connection def _makeOne(self, *args, **kw): @@ -25,13 +27,17 @@ def test_http_w_existing(self): def test_http_wo_creds(self): from httplib2 import Http + conn = self._makeOne() self.assertTrue(isinstance(conn.http, Http)) def test_http_w_creds(self): from httplib2 import Http + authorized = object() + class Creds(object): + def authorize(self, http): self._called_with = http return authorized @@ -51,18 
+57,17 @@ def test__request_w_200(self): 'datasets', DATASET_ID, METHOD, - ]) + ]) http = conn._http = Http({'status': '200'}, 'CONTENT') self.assertEqual(conn._request(DATASET_ID, METHOD, DATA), 'CONTENT') self.assertEqual(http._called_with, {'uri': URI, 'method': 'POST', - 'headers': - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '4', - }, + 'headers': {'Content-Type': 'application/x-protobuf', + 'Content-Length': '4', + }, 'body': DATA, - }) + }) def test__request_not_200(self): DATASET_ID = 'DATASET' @@ -78,12 +83,15 @@ def test__request_not_200(self): def test__rpc(self): class ReqPB(object): + def SerializeToString(self): return b'REQPB' class RspPB(object): + def __init__(self, pb): self._pb = pb + @classmethod def FromString(cls, pb): return cls(pb) @@ -97,7 +105,7 @@ def FromString(cls, pb): 'datasets', DATASET_ID, METHOD, - ]) + ]) http = conn._http = Http({'status': '200'}, 'CONTENT') response = conn._rpc(DATASET_ID, METHOD, ReqPB(), RspPB) self.assertTrue(isinstance(response, RspPB)) @@ -105,12 +113,11 @@ def FromString(cls, pb): self.assertEqual(http._called_with, {'uri': URI, 'method': 'POST', - 'headers': - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '5', - }, + 'headers': {'Content-Type': 'application/x-protobuf', + 'Content-Length': '5', + }, 'body': b'REQPB', - }) + }) def test_build_api_url_w_default_base_version(self): DATASET_ID = 'DATASET' @@ -122,7 +129,7 @@ def test_build_api_url_w_default_base_version(self): 'datasets', DATASET_ID, METHOD, - ]) + ]) self.assertEqual(klass.build_api_url(DATASET_ID, METHOD), URI) def test_build_api_url_w_explicit_base_version(self): @@ -137,7 +144,7 @@ def test_build_api_url_w_explicit_base_version(self): 'datasets', DATASET_ID, METHOD, - ]) + ]) self.assertEqual(klass.build_api_url(DATASET_ID, METHOD, BASE, VER), URI) @@ -153,6 +160,7 @@ def test_transaction_setter(self): def test_mutation_wo_transaction(self): from gcloud.datastore.connection import datastore_pb + class Mutation(object): pass conn = self._makeOne() @@ -161,8 +169,10 @@ class Mutation(object): self.assertTrue(isinstance(found, Mutation)) def test_mutation_w_transaction(self): + class Mutation(object): pass + class Xact(object): def mutation(self): return Mutation() @@ -186,6 +196,7 @@ def test_begin_transaction_w_existing_transaction(self): def test_begin_transaction_default_serialize(self): from gcloud.datastore.connection import datastore_pb + xact = object() DATASET_ID = 'DATASET' TRANSACTION = 'TRANSACTION' @@ -198,16 +209,16 @@ def test_begin_transaction_default_serialize(self): 'datasets', DATASET_ID, 'beginTransaction', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.begin_transaction(DATASET_ID), TRANSACTION) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '2', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '2', + }) rq_class = datastore_pb.BeginTransactionRequest request = rq_class() request.ParseFromString(cw['body']) @@ -215,6 +226,7 @@ def test_begin_transaction_default_serialize(self): def test_begin_transaction_explicit_serialize(self): from gcloud.datastore.connection import datastore_pb + xact = object() DATASET_ID = 'DATASET' TRANSACTION = 'TRANSACTION' @@ -227,16 +239,16 @@ def test_begin_transaction_explicit_serialize(self): 'datasets', DATASET_ID, 'beginTransaction', - ]) + ]) http = 
conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.begin_transaction(DATASET_ID, True), TRANSACTION) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '2', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '2', + }) rq_class = datastore_pb.BeginTransactionRequest request = rq_class() request.ParseFromString(cw['body']) @@ -249,7 +261,9 @@ def test_rollback_transaction_wo_existing_transaction(self): conn.rollback_transaction, DATASET_ID) def test_rollback_transaction_w_existing_transaction_no_id(self): + class Xact(object): + def id(self): return None DATASET_ID = 'DATASET' @@ -262,7 +276,9 @@ def test_rollback_transaction_ok(self): from gcloud.datastore.connection import datastore_pb DATASET_ID = 'DATASET' TRANSACTION = 'xact' + class Xact(object): + def id(self): return TRANSACTION rsp_pb = datastore_pb.RollbackResponse() @@ -274,16 +290,16 @@ def id(self): 'datasets', DATASET_ID, 'rollback', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.rollback_transaction(DATASET_ID), None) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '6', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '6', + }) rq_class = datastore_pb.RollbackRequest request = rq_class() request.ParseFromString(cw['body']) @@ -292,6 +308,7 @@ def id(self): def test_run_query_wo_namespace_empty_result(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.query import Query + DATASET_ID = 'DATASET' KIND = 'Nonesuch' q_pb = Query(KIND, DATASET_ID).to_protobuf() @@ -303,16 +320,16 @@ def test_run_query_wo_namespace_empty_result(self): 'datasets', DATASET_ID, 'runQuery', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.run_query(DATASET_ID, q_pb), []) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '14', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '14', + }) rq_class = datastore_pb.RunQueryRequest request = rq_class() request.ParseFromString(cw['body']) @@ -322,6 +339,7 @@ def test_run_query_wo_namespace_empty_result(self): def test_run_query_w_namespace_nonempty_result(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.query import Query + DATASET_ID = 'DATASET' KIND = 'Kind' entity = datastore_pb.Entity() @@ -329,7 +347,7 @@ def test_run_query_w_namespace_nonempty_result(self): rsp_pb = datastore_pb.RunQueryResponse() rsp_pb.batch.entity_result.add(entity=entity) rsp_pb.batch.entity_result_type = 1 # FULL - rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS + rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS conn = self._makeOne() URI = '/'.join([conn.API_BASE_URL, 'datastore', @@ -337,17 +355,17 @@ def test_run_query_w_namespace_nonempty_result(self): 'datasets', DATASET_ID, 'runQuery', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.run_query(DATASET_ID, q_pb, 'NS') - returned, = result # one entity + returned, = result # One entity. 
cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '16', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '16', + }) rq_class = datastore_pb.RunQueryRequest request = rq_class() request.ParseFromString(cw['body']) @@ -358,6 +376,7 @@ def test_lookup_single_key_empty_response(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() @@ -369,16 +388,16 @@ def test_lookup_single_key_empty_response(self): 'datasets', DATASET_ID, 'lookup', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.lookup(DATASET_ID, key_pb), None) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '26', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '26', + }) rq_class = datastore_pb.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -390,6 +409,7 @@ def test_lookup_single_key_nonempty_response(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() @@ -404,7 +424,7 @@ def test_lookup_single_key_nonempty_response(self): 'datasets', DATASET_ID, 'lookup', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found = conn.lookup(DATASET_ID, key_pb) self.assertEqual(found.key.path_element[0].kind, 'Kind') @@ -413,9 +433,9 @@ def test_lookup_single_key_nonempty_response(self): self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '26', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '26', + }) rq_class = datastore_pb.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -427,6 +447,7 @@ def test_lookup_multiple_keys_empty_response(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + DATASET_ID = 'DATASET' key_pb1 = Key(dataset=Dataset(DATASET_ID), path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() @@ -440,16 +461,16 @@ def test_lookup_multiple_keys_empty_response(self): 'datasets', DATASET_ID, 'lookup', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.lookup(DATASET_ID, [key_pb1, key_pb2]), []) cw = http._called_with self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '52', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '52', + }) rq_class = datastore_pb.LookupRequest request = rq_class() request.ParseFromString(cw['body']) @@ -462,9 +483,10 @@ def test_commit_wo_transaction(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + DATASET_ID = 'DATASET' key_pb = 
Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() mutation = datastore_pb.Mutation() insert = mutation.upsert.add() @@ -479,7 +501,7 @@ def test_commit_wo_transaction(self): 'datasets', DATASET_ID, 'commit', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.commit(DATASET_ID, mutation) self.assertEqual(result.index_updates, 0) @@ -488,9 +510,9 @@ def test_commit_wo_transaction(self): self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '47', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '47', + }) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -502,12 +524,13 @@ def test_commit_w_transaction(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + class Xact(object): def id(self): return 'xact' DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() mutation = datastore_pb.Mutation() insert = mutation.upsert.add() @@ -523,7 +546,7 @@ def id(self): 'datasets', DATASET_ID, 'commit', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.commit(DATASET_ID, mutation) self.assertEqual(result.index_updates, 0) @@ -532,9 +555,9 @@ def id(self): self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '53', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '53', + }) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -546,9 +569,10 @@ def test_save_entity_wo_transaction_w_upsert(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() URI = '/'.join([conn.API_BASE_URL, @@ -557,7 +581,7 @@ def test_save_entity_wo_transaction_w_upsert(self): 'datasets', DATASET_ID, 'commit', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.save_entity(DATASET_ID, key_pb, {'foo': 'Foo'}) self.assertEqual(result, True) @@ -565,9 +589,9 @@ def test_save_entity_wo_transaction_w_upsert(self): self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '47', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '47', + }) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -589,11 +613,12 @@ def test_save_entity_wo_transaction_w_auto_id(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind'}]).to_protobuf() + 
path=[{'kind': 'Kind'}]).to_protobuf() updated_key_pb = Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() mr_pb = rsp_pb.mutation_result mr_pb.index_updates = 0 @@ -606,7 +631,7 @@ def test_save_entity_wo_transaction_w_auto_id(self): 'datasets', DATASET_ID, 'commit', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.save_entity(DATASET_ID, key_pb, {'foo': 'Foo'}) self.assertEqual(result, updated_key_pb) @@ -614,9 +639,9 @@ def test_save_entity_wo_transaction_w_auto_id(self): self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '44', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '44', + }) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -639,15 +664,19 @@ def test_save_entity_w_transaction(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + mutation = datastore_pb.Mutation() + class Xact(object): + def id(self): return 'xact' + def mutation(self): return mutation DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() conn.transaction(Xact()) @@ -657,7 +686,7 @@ def mutation(self): 'datasets', DATASET_ID, 'commit', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.save_entity(DATASET_ID, key_pb, {'foo': 'Foo'}) self.assertEqual(result, True) @@ -669,9 +698,10 @@ def test_delete_entities_wo_transaction(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() URI = '/'.join([conn.API_BASE_URL, @@ -679,8 +709,7 @@ def test_delete_entities_wo_transaction(self): conn.API_VERSION, 'datasets', DATASET_ID, - 'commit', - ]) + 'commit', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.delete_entities(DATASET_ID, [key_pb]) self.assertEqual(result.index_updates, 0) @@ -689,9 +718,9 @@ def test_delete_entities_wo_transaction(self): self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '30', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '30', + }) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -709,15 +738,19 @@ def test_delete_entities_w_transaction(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + mutation = datastore_pb.Mutation() + class Xact(object): + def id(self): return 'xact' + def mutation(self): return mutation DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = 
datastore_pb.CommitResponse() conn = self._makeOne() conn.transaction(Xact()) @@ -727,7 +760,7 @@ def mutation(self): 'datasets', DATASET_ID, 'commit', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.delete_entities(DATASET_ID, [key_pb]) self.assertEqual(result, True) @@ -739,9 +772,10 @@ def test_delete_entity_wo_transaction(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() URI = '/'.join([conn.API_BASE_URL, @@ -750,7 +784,7 @@ def test_delete_entity_wo_transaction(self): 'datasets', DATASET_ID, 'commit', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.delete_entity(DATASET_ID, key_pb) self.assertEqual(result.index_updates, 0) @@ -759,9 +793,9 @@ def test_delete_entity_wo_transaction(self): self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers'], - {'Content-Type': 'application/x-protobuf', - 'Content-Length': '30', - }) + {'Content-Type': 'application/x-protobuf', + 'Content-Length': '30', + }) rq_class = datastore_pb.CommitRequest request = rq_class() request.ParseFromString(cw['body']) @@ -779,15 +813,19 @@ def test_delete_entity_w_transaction(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + mutation = datastore_pb.Mutation() + class Xact(object): + def id(self): return 'xact' + def mutation(self): return mutation DATASET_ID = 'DATASET' key_pb = Key(dataset=Dataset(DATASET_ID), - path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() conn.transaction(Xact()) @@ -797,7 +835,7 @@ def mutation(self): 'datasets', DATASET_ID, 'commit', - ]) + ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result = conn.delete_entity(DATASET_ID, key_pb) self.assertEqual(result, True) @@ -805,6 +843,7 @@ def mutation(self): mutation = conn.mutation() self.assertEqual(len(mutation.delete), 1) + class Http(object): _called_with = None diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py index 9cc93cfb4710..a734512119d0 100644 --- a/gcloud/datastore/test_dataset.py +++ b/gcloud/datastore/test_dataset.py @@ -1,7 +1,6 @@ import unittest2 - class TestDataset(unittest2.TestCase): def _getTargetClass(self): @@ -119,8 +118,10 @@ def test_get_entity_hit(self): class _Connection(object): _called_with = None + def __init__(self, *result): self._result = list(result) + def lookup(self, **kw): self._called_with = kw return self._result diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py index 039b9b848295..21d45f6540c1 100644 --- a/gcloud/datastore/test_entity.py +++ b/gcloud/datastore/test_entity.py @@ -11,10 +11,12 @@ class TestEntity(unittest2.TestCase): def _getTargetClass(self): from gcloud.datastore.entity import Entity + return Entity def _makeOne(self, dataset=_MARKER, kind=_KIND): from gcloud.datastore.dataset import Dataset + klass = self._getTargetClass() if dataset is _MARKER: dataset = Dataset(_DATASET_ID) @@ -31,12 +33,14 @@ def test_ctor_defaults(self): def 
test_ctor_explicit(self): from gcloud.datastore.dataset import Dataset + dataset = Dataset(_DATASET_ID) entity = self._makeOne(dataset, _KIND) self.assertTrue(entity.dataset() is dataset) def test_key_getter(self): from gcloud.datastore.key import Key + entity = self._makeOne() key = entity.key() self.assertIsInstance(key, Key) @@ -52,6 +56,7 @@ def test_key_setter(self): def test_from_key(self): from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + klass = self._getTargetClass() dataset = Dataset(_DATASET_ID) key = Key(dataset=dataset).kind(_KIND).id(_ID) @@ -65,6 +70,7 @@ def test_from_key(self): def test_from_protobuf(self): from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore.dataset import Dataset + entity_pb = datastore_pb.Entity() entity_pb.key.partition_id.dataset_id = _DATASET_ID entity_pb.key.path_element.add(kind=_KIND, id=_ID) @@ -88,7 +94,7 @@ def test_reload_miss(self): entity = self._makeOne() entity.key(key) entity['foo'] = 'Foo' - # does not raise, does not update on miss + # Does not raise, does not update on miss. self.assertTrue(entity.reload() is entity) self.assertEqual(entity['foo'], 'Foo') @@ -179,44 +185,61 @@ class _Key(object): _key = 'KEY' _partial = False _path = None + def __init__(self, dataset): self._dataset = dataset + def dataset(self): return self._dataset + def to_protobuf(self): return self._key + def is_partial(self): return self._partial + def path(self, path): self._path = path + class _Dataset(dict): def __init__(self, connection=None): self._connection = connection + def id(self): return _DATASET_ID + def connection(self): return self._connection + def get_entity(self, key): return self.get(key) + def get_entities(self, keys): return [self.get(x) for x in keys] + class _Connection(object): _transaction = _saved = _deleted = None _save_result = True + def transaction(self): return self._transaction + def save_entity(self, dataset_id, key_pb, properties): self._saved = (dataset_id, key_pb, properties) return self._save_result + def delete_entity(self, dataset_id, key_pb): self._deleted = (dataset_id, key_pb) + class _Transaction(object): _added = () + def __nonzero__(self): return True __bool__ = __nonzero__ + def add_auto_id_entity(self, entity): self._added += (entity,) diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py index 139eb02a4784..e976a475668c 100644 --- a/gcloud/datastore/test_helpers.py +++ b/gcloud/datastore/test_helpers.py @@ -5,13 +5,15 @@ class Test_get_protobuf_attribute_and_value(unittest2.TestCase): def _callFUT(self, val): from gcloud.datastore.helpers import get_protobuf_attribute_and_value + return get_protobuf_attribute_and_value(val) def test_datetime_naive(self): import calendar import datetime import pytz - naive = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375) # no zone + + naive = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375) # No zone. 
utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc) name, value = self._callFUT(naive) self.assertEqual(name, 'timestamp_microseconds_value') @@ -22,6 +24,7 @@ def test_datetime_w_zone(self): import calendar import datetime import pytz + utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc) name, value = self._callFUT(utc) self.assertEqual(name, 'timestamp_microseconds_value') @@ -31,6 +34,7 @@ def test_datetime_w_zone(self): def test_key(self): from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + _DATASET = 'DATASET' _KIND = 'KIND' _ID = 1234 @@ -84,10 +88,12 @@ class Test_get_value_from_protobuf(unittest2.TestCase): def _callFUT(self, pb): from gcloud.datastore.helpers import get_value_from_protobuf + return get_value_from_protobuf(pb) def _makePB(self, attr_name, value): from gcloud.datastore.datastore_v1_pb2 import Property + prop = Property() setattr(prop.value, attr_name, value) return prop @@ -96,7 +102,8 @@ def test_datetime(self): import calendar import datetime import pytz - naive = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375) # no zone + + naive = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375) # No zone. utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc) micros = (calendar.timegm(utc.timetuple()) * 1000000) + 4375 pb = self._makePB('timestamp_microseconds_value', micros) @@ -106,6 +113,7 @@ def test_key(self): from gcloud.datastore.datastore_v1_pb2 import Property from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + _DATASET = 'DATASET' _KIND = 'KIND' _ID = 1234 @@ -139,4 +147,4 @@ def test_unicode(self): def test_unknown(self): from gcloud.datastore.datastore_v1_pb2 import Property pb = Property() - self.assertEqual(self._callFUT(pb), None) # XXX desirable? + self.assertEqual(self._callFUT(pb), None) # XXX desirable? 
diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index 0b7a441a68cc..b722b326ac48 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -176,7 +176,7 @@ def test_from_path_single_element(self): def test_from_path_three_elements(self): self.assertRaises(ValueError, self._getTargetClass().from_path, - 'abc', 'def', 'ghi') + 'abc', 'def', 'ghi') def test_from_path_two_elements_second_string(self): key = self._getTargetClass().from_path('abc', 'def') @@ -191,9 +191,10 @@ def test_from_path_two_elements_second_int(self): def test_from_path_nested(self): key = self._getTargetClass().from_path('abc', 'def', 'ghi', 123) self.assertEqual(key.kind(), 'ghi') - self.assertEqual(key.path(), [{'kind': 'abc', 'name': 'def'}, - {'kind': 'ghi', 'id': 123}, - ]) + self.assertEqual(key.path(), + [{'kind': 'abc', 'name': 'def'}, + {'kind': 'ghi', 'id': 123}, + ]) def test_is_partial_no_name_or_id(self): key = self._makeOne() diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index 06956e7d9ba2..3d350625b762 100644 --- a/gcloud/datastore/test_query.py +++ b/gcloud/datastore/test_query.py @@ -5,6 +5,7 @@ class TestQuery(unittest2.TestCase): def _getTargetClass(self): from gcloud.datastore.query import Query + return Query def _makeOne(self, kind=None, dataset=None): @@ -18,6 +19,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): from gcloud.datastore.dataset import Dataset + _DATASET = 'DATASET' _NAMESPACE = 'NAMESPACE' _KIND = 'KIND' @@ -29,6 +31,7 @@ def test_ctor_explicit(self): def test__clone(self): from gcloud.datastore.dataset import Dataset + _DATASET = 'DATASET' _KIND = 'KIND' dataset = Dataset(_DATASET) @@ -63,7 +66,7 @@ def test_filter_w_known_operator(self): self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) q_pb = after.to_protobuf() - self.assertEqual(q_pb.filter.composite_filter.operator, 1) # AND + self.assertEqual(q_pb.filter.composite_filter.operator, 1) # AND f_pb, = list(q_pb.filter.composite_filter.filter) p_pb = f_pb.property_filter self.assertEqual(p_pb.property.name, 'firstname') @@ -84,7 +87,7 @@ def test_ancester_wo_existing_ancestor_query_w_key(self): self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) q_pb = after.to_protobuf() - self.assertEqual(q_pb.filter.composite_filter.operator, 1) # AND + self.assertEqual(q_pb.filter.composite_filter.operator, 1) # AND f_pb, = list(q_pb.filter.composite_filter.filter) p_pb = f_pb.property_filter self.assertEqual(p_pb.property.name, '__key__') @@ -100,7 +103,7 @@ def test_ancester_wo_existing_ancestor_query_w_list(self): self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) q_pb = after.to_protobuf() - self.assertEqual(q_pb.filter.composite_filter.operator, 1) # AND + self.assertEqual(q_pb.filter.composite_filter.operator, 1) # AND f_pb, = list(q_pb.filter.composite_filter.filter) p_pb = f_pb.property_filter self.assertEqual(p_pb.property.name, '__key__') @@ -141,7 +144,7 @@ def test_kind_setter_w_existing(self): self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) self.assertTrue(after.dataset() is dataset) - kq_pb1, kq_pb2= list(after.kind()) + kq_pb1, kq_pb2 = list(after.kind()) self.assertEqual(kq_pb1.name, _KIND_BEFORE) self.assertEqual(kq_pb2.name, _KIND_AFTER) @@ -195,7 +198,7 @@ def test_fetch_default_limit(self): self.assertEqual(connection._called_with, {'dataset_id': _DATASET, 'query_pb': 
query.to_protobuf(), - }) + }) def test_fetch_explicit_limit(self): from gcloud.datastore.datastore_v1_pb2 import Entity @@ -220,23 +223,28 @@ def test_fetch_explicit_limit(self): self.assertEqual(connection._called_with, {'dataset_id': _DATASET, 'query_pb': limited.to_protobuf(), - }) + }) class _Dataset(object): + def __init__(self, id, connection): self._id = id self._connection = connection + def id(self): return self._id + def connection(self): return self._connection class _Connection(object): _called_with = None + def __init__(self, *result): self._result = list(result) + def run_query(self, **kw): self._called_with = kw return self._result diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py index 4ca7e2d3daa7..b0841489a158 100644 --- a/gcloud/datastore/test_transaction.py +++ b/gcloud/datastore/test_transaction.py @@ -5,6 +5,7 @@ class TestTransaction(unittest2.TestCase): def _getTargetClass(self): from gcloud.datastore.transaction import Transaction + return Transaction def _makeOne(self, dataset=None): @@ -12,6 +13,7 @@ def _makeOne(self, dataset=None): def test_ctor(self): from gcloud.datastore.datastore_v1_pb2 import Mutation + _DATASET = 'DATASET' connection = _Connection() dataset = _Dataset(_DATASET, connection) @@ -89,7 +91,7 @@ def test_commit_w_already(self): xact = self._makeOne(dataset) xact._mutation = mutation = object() xact.begin() - connection.transaction(()) # simulate previous commit via false-ish + connection.transaction(()) # Simulate previous commit via false-ish. xact.commit() self.assertEqual(connection._committed, None) self.assertTrue(connection._xact is None) @@ -125,18 +127,20 @@ class Foo(Exception): self.assertTrue(connection._xact is xact) raise Foo() except Foo: - pass # XXX - #self.assertEqual(xact.id(), None) - #self.assertEqual(connection._rolled_back, _DATASET)) - #self.assertEqual(connection._xact, None) + pass # XXX + # self.assertEqual(xact.id(), None) + # self.assertEqual(connection._rolled_back, _DATASET)) + # self.assertEqual(connection._xact, None) # XXX should *not* have committed self.assertEqual(connection._committed, (_DATASET, mutation)) - #self.assertEqual(connection._committed, None) + # self.assertEqual(connection._committed, None) self.assertTrue(connection._xact is None) self.assertEqual(xact.id(), None) + def _makeKey(kind, id): from gcloud.datastore.datastore_v1_pb2 import Key + key = Key() elem = key.path_element.add() elem.kind = kind @@ -145,11 +149,14 @@ def _makeKey(kind, id): class _Dataset(object): + def __init__(self, id, connection=None): self._id = id self._connection = connection + def id(self): return self._id + def connection(self): return self._connection @@ -157,30 +164,37 @@ def connection(self): class _Connection(object): _marker = object() _begun = _rolled_back = _committed = _xact = None + def __init__(self, xact_id=123): self._xact_id = xact_id self._commit_result = _CommitResult() + def transaction(self, xact=_marker): if xact is self._marker: return self._xact self._xact = xact + def begin_transaction(self, dataset_id): self._begun = dataset_id return self._xact_id + def rollback_transaction(self, dataset_id): self._rolled_back = dataset_id + def commit(self, dataset_id, mutation): self._committed = (dataset_id, mutation) return self._commit_result class _CommitResult(object): + def __init__(self, *new_keys): self.insert_auto_id_key = new_keys class _Key(object): _path = None + def path(self, path): self._path = path return self @@ -188,8 +202,10 @@ def path(self, 
path): class _Entity(object): _marker = object() + def __init__(self): self._key = _Key() + def key(self, key=_marker): if key is self._marker: return self._key diff --git a/gcloud/storage/__init__.py b/gcloud/storage/__init__.py index b5337f28b333..8d3db4613f91 100644 --- a/gcloud/storage/__init__.py +++ b/gcloud/storage/__init__.py @@ -69,6 +69,7 @@ def get_connection(project, client_email, private_key_path): client_email, private_key_path, scope=SCOPE) return Connection(project=project, credentials=credentials) + def get_bucket(bucket_name, project, client_email, private_key_path): """Shortcut method to establish a connection to a particular bucket. diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py index 6078a0c4f54b..3c873bcaecc0 100644 --- a/gcloud/storage/acl.py +++ b/gcloud/storage/acl.py @@ -72,6 +72,7 @@ fields when sending metadata for ACLs to the API. """ + class ACL(object): """Container class representing a list of access controls.""" @@ -82,7 +83,6 @@ class Role(object): Writer = 'WRITER' Owner = 'OWNER' - class Entity(object): """Class representing a set of roles for an entity. @@ -111,9 +111,9 @@ def __str__(self): else: return '{self.type}-{self.identifier}'.format(self=self) - def __repr__(self): #pragma NO COVER + def __repr__(self): # pragma NO COVER return ''.format( - self=self, roles=', '.join(self.roles)) + self=self, roles=', '.join(self.roles)) def get_roles(self): """Get the list of roles permitted by this entity. @@ -181,7 +181,6 @@ def revoke_owner(self): return self.revoke(ACL.Role.Owner) - def __init__(self): self.entities = {} @@ -352,7 +351,7 @@ def get_entities(self): return self.entities.values() - def save(self): #pragma NO COVER + def save(self): # pragma NO COVER """A method to be overridden by subclasses. :raises: NotImplementedError diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index 3292bccf50e6..13567416e7f2 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -40,7 +40,7 @@ def from_dict(cls, bucket_dict, connection=None): return cls(connection=connection, name=bucket_dict['name'], metadata=bucket_dict) - def __repr__(self): #pragma NO COVER + def __repr__(self): # pragma NO COVER return '' % self.name def __iter__(self): @@ -124,7 +124,7 @@ def new_key(self, key): # Support Python 2 and 3. try: string_type = basestring - except NameError: #pragma NO COVER PY3k + except NameError: # pragma NO COVER PY3k string_type = str if isinstance(key, string_type): @@ -191,7 +191,7 @@ def delete_keys(self, keys): for key in keys: self.delete_key(key) - def copy_key(self): #pragma NO COVER + def copy_key(self): # pragma NO COVER raise NotImplementedError def upload_file(self, filename, key=None): @@ -402,12 +402,10 @@ def configure_website(self, main_page_suffix=None, not_found_page=None): :param not_found_page: The file to use when a page isn't found. 
""" - data = { - 'website': { - 'mainPageSuffix': main_page_suffix, - 'notFoundPage': not_found_page, - } - } + data = {'website': {'mainPageSuffix': main_page_suffix, + 'notFoundPage': not_found_page, + } + } return self.patch_metadata(data) def disable_website(self): diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py index 1cf30fbe4cf9..39a682158f81 100644 --- a/gcloud/storage/connection.py +++ b/gcloud/storage/connection.py @@ -160,7 +160,7 @@ def make_request(self, method, url, data=None, content_type=None, headers['Content-Type'] = content_type return self.http.request(uri=url, method=method, headers=headers, - body=data) + body=data) def api_request(self, method, path, query_params=None, data=None, content_type=None, @@ -415,7 +415,7 @@ def new_bucket(self, bucket): # Support Python 2 and 3. try: string_type = basestring - except NameError: #pragma NO COVER PY3k + except NameError: # pragma NO COVER PY3k string_type = str if isinstance(bucket, string_type): @@ -423,7 +423,9 @@ def new_bucket(self, bucket): raise TypeError('Invalid bucket: %s' % bucket) - def generate_signed_url(self, resource, expiration, method='GET', content_md5=None, content_type=None): #pragma NO COVER UGH + def generate_signed_url(self, resource, expiration, + method='GET', content_md5=None, + content_type=None): # pragma NO COVER UGH """Generate a signed URL to provide query-string authentication to a resource. :type resource: string @@ -470,19 +472,20 @@ def generate_signed_url(self, resource, expiration, method='GET', content_md5=No expiration = int(time.mktime(expiration.timetuple())) if not isinstance(expiration, (int, long)): - raise ValueError('Expected an integer timestamp, datetime, or timedelta. ' - 'Got %s' % type(expiration)) + raise ValueError('Expected an integer timestamp, datetime, or ' + 'timedelta. Got %s' % type(expiration)) # Generate the string to sign. signature_string = '\n'.join([ - method, - content_md5 or '', - content_type or '', - str(expiration), - resource]) + method, + content_md5 or '', + content_type or '', + str(expiration), + resource]) # Take our PKCS12 (.p12) key and make it into a RSA key we can use... - pkcs12 = crypto.load_pkcs12(base64.b64decode(self.credentials.private_key), 'notasecret') + pkcs12 = crypto.load_pkcs12(base64.b64decode(self.credentials.private_key), + 'notasecret') pem = crypto.dump_privatekey(crypto.FILETYPE_PEM, pkcs12.get_privatekey()) pem_key = RSA.importKey(pem) diff --git a/gcloud/storage/demo/__init__.py b/gcloud/storage/demo/__init__.py index f78e36eb96ca..156a543144b8 100644 --- a/gcloud/storage/demo/__init__.py +++ b/gcloud/storage/demo/__init__.py @@ -5,10 +5,11 @@ __all__ = ['get_connection', 'CLIENT_EMAIL', 'PRIVATE_KEY_PATH', 'PROJECT'] -CLIENT_EMAIL = '606734090113-6ink7iugcv89da9sru7lii8bs3i0obqg@developer.gserviceaccount.com' +CLIENT_EMAIL = ('606734090113-6ink7iugcv89da9sru7lii8bs3i0obqg@' + 'developer.gserviceaccount.com') PRIVATE_KEY_PATH = os.path.join(os.path.dirname(__file__), 'demo.key') PROJECT = 'gcloud-storage-demo' -def get_connection(): #pragma NO COVER +def get_connection(): # pragma NO COVER. return storage.get_connection(PROJECT, CLIENT_EMAIL, PRIVATE_KEY_PATH) diff --git a/gcloud/storage/demo/demo.py b/gcloud/storage/demo/demo.py index e0eb0c4b0a67..db72823d446d 100644 --- a/gcloud/storage/demo/demo.py +++ b/gcloud/storage/demo/demo.py @@ -8,11 +8,11 @@ connection = demo.get_connection() # OK, now let's look at all of the buckets... -print connection.get_all_buckets() # This might take a second... 
+print connection.get_all_buckets() # This might take a second... # Now let's create a new bucket... import time -bucket_name = ("bucket-%s" % time.time()).replace(".", "") # Get rid of dots... +bucket_name = ("bucket-%s" % time.time()).replace(".", "") # Get rid of dots. print bucket_name bucket = connection.create_bucket(bucket_name) print bucket diff --git a/gcloud/storage/iterator.py b/gcloud/storage/iterator.py index b2dfcff8ba11..a3c83db4a869 100644 --- a/gcloud/storage/iterator.py +++ b/gcloud/storage/iterator.py @@ -109,7 +109,7 @@ def reset(self): self.page_number = 0 self.next_page_token = None - def get_items_from_response(self, response): #pragma NO COVER + def get_items_from_response(self, response): # pragma NO COVER """Factory method called while iterating. This should be overriden. This method should be overridden by a subclass. diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py index 722e3e393968..490c68cf77fb 100644 --- a/gcloud/storage/key.py +++ b/gcloud/storage/key.py @@ -56,7 +56,7 @@ def from_dict(cls, key_dict, bucket=None): return cls(bucket=bucket, name=key_dict['name'], metadata=key_dict) - def __repr__(self): #pragma NO COVER + def __repr__(self): # pragma NO COVER if self.bucket: bucket_name = self.bucket.name else: @@ -95,7 +95,8 @@ def public_url(self): return '{storage_base_url}/{self.bucket.name}/{self.name}'.format( storage_base_url='http://commondatastorage.googleapis.com', self=self) - def generate_signed_url(self, expiration, method='GET'): #pragma NO COVER UGH + def generate_signed_url(self, expiration, + method='GET'): # pragma NO COVER UGH """Generates a signed URL for this key. If you have a key that you want to allow access to @@ -151,7 +152,7 @@ def get_contents_to_file(self, fh): for chunk in KeyDataIterator(self): try: fh.write(chunk) - except IOError, e: #pragma NO COVER + except IOError, e: # pragma NO COVER if e.errno == errno.ENOSPC: raise Exception('No space left on device.') @@ -208,7 +209,7 @@ def set_contents_from_file(self, fh, rewind=False, size=None, headers = { 'X-Upload-Content-Type': content_type or 'application/unknown', 'X-Upload-Content-Length': total_bytes - } + } upload_url = self.connection.build_api_url( path=self.bucket.path + '/o', @@ -232,9 +233,10 @@ def set_contents_from_file(self, fh, rewind=False, size=None, headers = { 'Content-Range': 'bytes %d-%d/%d' % (start, end, total_bytes), - } + } - response, content = self.connection.make_request(content_type='text/plain', + response, content = self.connection.make_request( + content_type='text/plain', method='POST', url=upload_url, headers=headers, data=data) bytes_uploaded += chunk_size diff --git a/gcloud/storage/test___init__.py b/gcloud/storage/test___init__.py index 46ab3b2fed96..fe640bdfea4d 100644 --- a/gcloud/storage/test___init__.py +++ b/gcloud/storage/test___init__.py @@ -30,26 +30,31 @@ def test_it(self): {'service_account_name': CLIENT_EMAIL, 'private_key': PRIVATE_KEY, 'scope': SCOPE, - }) + }) class Test_get_bucket(unittest2.TestCase): def _callFUT(self, *args, **kw): from gcloud.storage import get_bucket + return get_bucket(*args, **kw) def test_it(self): from tempfile import NamedTemporaryFile from gcloud import storage from gcloud.test_credentials import _Monkey + bucket = object() + class _Connection(object): + def get_bucket(self, bucket_name): self._called_With = bucket_name return bucket connection = _Connection() _called_With = [] + def get_connection(*args, **kw): _called_With.append((args, kw)) return connection @@ -64,6 +69,5 @@ def 
get_connection(*args, **kw): found = self._callFUT(BUCKET, PROJECT, CLIENT_EMAIL, f.name) self.assertTrue(found is bucket) self.assertEqual(_called_With, - [((PROJECT, CLIENT_EMAIL, f.name), {})]) + [((PROJECT, CLIENT_EMAIL, f.name), {})]) self.assertEqual(connection._called_With, BUCKET) - diff --git a/gcloud/storage/test_acl.py b/gcloud/storage/test_acl.py index f2adc28b2a80..e268eb712f1c 100644 --- a/gcloud/storage/test_acl.py +++ b/gcloud/storage/test_acl.py @@ -192,7 +192,7 @@ def test_entity_from_dict_string_wo_hyphen(self): acl = self._makeOne() self.assertRaises(ValueError, acl.entity_from_dict, - {'entity': 'bogus', 'role': ROLE}) + {'entity': 'bogus', 'role': ROLE}) self.assertEqual(list(acl.get_entities()), []) def test_has_entity_miss_str(self): diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 647f3a16cb40..d32b227ed038 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -254,10 +254,13 @@ def test_upload_file_default_key(self): BASENAME = 'file.ext' FILENAME = '/path/to/%s' % BASENAME _uploaded = [] + class _Key(object): + def __init__(self, bucket, name): self._bucket = bucket self._name = name + def set_contents_from_filename(self, filename): _uploaded.append((self._bucket, self._name, filename)) bucket = self._makeOne() @@ -271,10 +274,13 @@ def test_upload_file_explicit_key(self): FILENAME = '/path/to/file' KEY = 'key' _uploaded = [] + class _Key(object): + def __init__(self, bucket, name): self._bucket = bucket self._name = name + def set_contents_from_filename(self, filename): _uploaded.append((self._bucket, self._name, filename)) bucket = self._makeOne() @@ -288,10 +294,13 @@ def test_upload_file_object_no_key(self): FILENAME = 'file.txt' FILEOBJECT = MockFile(FILENAME) _uploaded = [] + class _Key(object): + def __init__(self, bucket, name): self._bucket = bucket self._name = name + def set_contents_from_file(self, fh): _uploaded.append((self._bucket, self._name, fh)) bucket = self._makeOne() @@ -306,10 +315,13 @@ def test_upload_file_object_explicit_key(self): FILEOBJECT = MockFile(FILENAME) KEY = 'key' _uploaded = [] + class _Key(object): + def __init__(self, bucket, name): self._bucket = bucket self._name = name + def set_contents_from_file(self, fh): _uploaded.append((self._bucket, self._name, fh)) bucket = self._makeOne() @@ -398,7 +410,7 @@ def test_get_metadata_none_set_acl_hit(self): self.assertEqual(kw[0]['path'], '/b/%s' % NAME) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_get_metadata_none_set_defaultObjectAcl_miss_explicit_default(self): + def test_get_metadata_none_set_defaultObjectAcl_miss_clear_default(self): NAME = 'name' after = {'bar': 'Bar'} connection = _Connection(after) @@ -679,8 +691,8 @@ def test_save_default_object_acl_existing_set_none_passed(self): connection = _Connection({'foo': 'Foo', 'acl': []}) connection = _Connection({'foo': 'Foo', 'acl': []}, {'foo': 'Foo', 'acl': [], - 'defaultObjectAcl': []}, - ) + 'defaultObjectAcl': []}, + ) metadata = {'defaultObjectAcl': []} bucket = self._makeOne(connection, NAME, metadata) bucket.reload_default_object_acl() @@ -701,8 +713,8 @@ def test_save_default_object_acl_existing_set_new_passed(self): new_acl = [{'entity': 'allUsers', 'role': ROLE}] connection = _Connection({'foo': 'Foo', 'acl': new_acl}, {'foo': 'Foo', 'acl': new_acl, - 'defaultObjectAcl': new_acl}, - ) + 'defaultObjectAcl': new_acl}, + ) metadata = {'defaultObjectAcl': []} bucket = self._makeOne(connection, NAME, metadata) 
bucket.reload_default_object_acl() @@ -724,8 +736,8 @@ def test_clear_default_object_acl(self): old_acl = [{'entity': 'allUsers', 'role': ROLE}] connection = _Connection({'foo': 'Foo', 'acl': []}, {'foo': 'Foo', 'acl': [], - 'defaultObjectAcl': []}, - ) + 'defaultObjectAcl': []}, + ) metadata = {'defaultObjectAcl': old_acl} bucket = self._makeOne(connection, NAME, metadata) bucket.reload_default_object_acl() @@ -792,19 +804,26 @@ def test_make_public_recursive(self): from gcloud.storage import iterator from gcloud.storage import bucket as MUT _saved = [] + class _Key(object): _granted = False + def __init__(self, bucket, name): self._bucket = bucket self._name = name + def get_acl(self): return self + def all(self): return self + def grant_read(self): self._granted = True + def save_acl(self): _saved.append((self._bucket, self._name, self._granted)) + class _KeyIterator(iterator.KeyIterator): def get_items_from_response(self, response): for item in response.get('items', []): @@ -835,19 +854,23 @@ def get_items_from_response(self, response): class _Connection(object): _delete_ok = False + def __init__(self, *responses): self._responses = responses self._requested = [] self._deleted = [] + def api_request(self, **kw): from gcloud.storage.exceptions import NotFoundError self._requested.append(kw) + try: response, self._responses = self._responses[0], self._responses[1:] except: raise NotFoundError('miss', None) else: return response + def delete_bucket(self, bucket, force=False): from gcloud.storage.exceptions import NotFoundError self._deleted.append((bucket, force)) @@ -858,6 +881,7 @@ def delete_bucket(self, bucket, force=False): class MockFile(io.StringIO): name = None - def __init__(self, name, buffer_ = None): + + def __init__(self, name, buffer_=None): super(MockFile, self).__init__(buffer_) self.name = name diff --git a/gcloud/storage/test_connection.py b/gcloud/storage/test_connection.py index ea793dafdf90..0fb2209bf136 100644 --- a/gcloud/storage/test_connection.py +++ b/gcloud/storage/test_connection.py @@ -39,6 +39,7 @@ def test_http_w_creds(self): from httplib2 import Http PROJECT = 'project' authorized = object() + class Creds(object): def authorize(self, http): self._called_with = http @@ -55,10 +56,11 @@ def test___iter___empty(self): 'storage', conn.API_VERSION, 'b?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{}') + }, + '{}') keys = list(conn) self.assertEqual(len(keys), 0) self.assertEqual(http._called_with['method'], 'GET') @@ -72,10 +74,11 @@ def test___iter___non_empty(self): 'storage', conn.API_VERSION, 'b?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{"items": [{"name": "%s"}]}' % KEY) + }, + '{"items": [{"name": "%s"}]}' % KEY) keys = list(conn) self.assertEqual(len(keys), 1) self.assertEqual(keys[0].name, KEY) @@ -91,10 +94,11 @@ def test___contains___miss(self): conn.API_VERSION, 'b', 'nonesuch?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '404', 'content-type': 'application/json', - }, '{}') + }, + '{}') self.assertFalse(NONESUCH in conn) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -108,10 +112,11 @@ def test___contains___hit(self): conn.API_VERSION, 'b', 'key?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{"name": "%s"}' % KEY) + }, + '{"name": "%s"}' % KEY) 
self.assertTrue(KEY in conn) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -124,7 +129,7 @@ def test_build_api_url_no_extra_query_params(self): 'storage', conn.API_VERSION, 'foo?project=%s' % PROJECT, - ]) + ]) self.assertEqual(conn.build_api_url('/foo'), URI) def test_build_api_url_w_extra_query_params(self): @@ -137,7 +142,7 @@ def test_build_api_url_w_extra_query_params(self): 'storage', conn.API_VERSION, 'foo' - ]) + ]) uri = conn.build_api_url('/foo', {'bar': 'baz'}) scheme, netloc, path, qs, frag = urlsplit(uri) self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) @@ -153,7 +158,8 @@ def test_make_request_no_data_no_content_type_no_headers(self): URI = 'http://example.com/test' http = conn._http = Http({'status': '200', 'content-type': 'text/plain', - }, '') + }, + '') headers, content = conn.make_request('GET', URI) self.assertEqual(headers['status'], '200') self.assertEqual(headers['content-type'], 'text/plain') @@ -163,8 +169,8 @@ def test_make_request_no_data_no_content_type_no_headers(self): self.assertEqual(http._called_with['body'], None) self.assertEqual(http._called_with['headers'], {'Accept-Encoding': 'gzip', - 'Content-Length': 0, - }) + 'Content-Length': 0, + }) def test_make_request_w_data_no_extra_headers(self): PROJECT = 'project' @@ -172,16 +178,18 @@ def test_make_request_w_data_no_extra_headers(self): URI = 'http://example.com/test' http = conn._http = Http({'status': '200', 'content-type': 'text/plain', - }, '') - headers, content = conn.make_request('GET', URI, {}, 'application/json') + }, + '') + headers, content = conn.make_request('GET', URI, {}, + 'application/json') self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) self.assertEqual(http._called_with['body'], {}) self.assertEqual(http._called_with['headers'], {'Accept-Encoding': 'gzip', - 'Content-Length': 0, + 'Content-Length': 0, 'Content-Type': 'application/json', - }) + }) def test_make_request_w_extra_headers(self): PROJECT = 'project' @@ -189,7 +197,8 @@ def test_make_request_w_extra_headers(self): URI = 'http://example.com/test' http = conn._http = Http({'status': '200', 'content-type': 'text/plain', - }, '') + }, + '') headers, content = conn.make_request('GET', URI, headers={'X-Foo': 'foo'}) self.assertEqual(http._called_with['method'], 'GET') @@ -197,9 +206,9 @@ def test_make_request_w_extra_headers(self): self.assertEqual(http._called_with['body'], None) self.assertEqual(http._called_with['headers'], {'Accept-Encoding': 'gzip', - 'Content-Length': 0, + 'Content-Length': 0, 'X-Foo': 'foo', - }) + }) def test_api_request_defaults(self): PROJECT = 'project' @@ -208,18 +217,18 @@ def test_api_request_defaults(self): URI = '/'.join([conn.API_BASE_URL, 'storage', conn.API_VERSION, - ]) + '%s?project=%s' % (PATH, PROJECT) + ]) + '%s?project=%s' % (PATH, PROJECT) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{}') + }, '{}') self.assertEqual(conn.api_request('GET', PATH), {}) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) self.assertEqual(http._called_with['body'], None) self.assertEqual(http._called_with['headers'], {'Accept-Encoding': 'gzip', - 'Content-Length': 0, - }) + 'Content-Length': 0, + }) def test_api_request_w_non_json_response(self): PROJECT = 'project' @@ -228,10 +237,11 @@ def test_api_request_w_non_json_response(self): 'storage', conn.API_VERSION, '?project=%s' % PROJECT, - ]) + ]) 
http = conn._http = Http({'status': '200', 'content-type': 'text/plain', - }, 'CONTENT') + }, + 'CONTENT') self.assertRaises(TypeError, conn.api_request, 'GET', '/') def test_api_request_wo_json_expected(self): @@ -241,10 +251,11 @@ def test_api_request_wo_json_expected(self): 'storage', conn.API_VERSION, '?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'text/plain', - }, 'CONTENT') + }, + 'CONTENT') self.assertEqual(conn.api_request('GET', '/', expect_json=False), 'CONTENT') @@ -257,10 +268,11 @@ def test_api_request_w_query_params(self): 'storage', conn.API_VERSION, '?project=%s&foo=bar' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{}') + }, + '{}') self.assertEqual(conn.api_request('GET', '/', {'foo': 'bar'}), {}) self.assertEqual(http._called_with['method'], 'GET') uri = http._called_with['uri'] @@ -274,8 +286,8 @@ def test_api_request_w_query_params(self): self.assertEqual(http._called_with['body'], None) self.assertEqual(http._called_with['headers'], {'Accept-Encoding': 'gzip', - 'Content-Length': 0, - }) + 'Content-Length': 0, + }) def test_api_request_w_data(self): import json @@ -287,19 +299,20 @@ def test_api_request_w_data(self): 'storage', conn.API_VERSION, '?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{}') + }, + '{}') self.assertEqual(conn.api_request('POST', '/', data=DATA), {}) self.assertEqual(http._called_with['method'], 'POST') self.assertEqual(http._called_with['uri'], URI) self.assertEqual(http._called_with['body'], DATAJ) self.assertEqual(http._called_with['headers'], {'Accept-Encoding': 'gzip', - 'Content-Length': len(DATAJ), + 'Content-Length': len(DATAJ), 'Content-Type': 'application/json', - }) + }) def test_api_request_w_404(self): from gcloud.storage.exceptions import NotFoundError @@ -309,10 +322,11 @@ def test_api_request_w_404(self): 'storage', conn.API_VERSION, '?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '404', 'content-type': 'text/plain', - }, '') + }, + '') self.assertRaises(NotFoundError, conn.api_request, 'GET', '/') def test_api_request_w_500(self): @@ -323,10 +337,11 @@ def test_api_request_w_500(self): 'storage', conn.API_VERSION, '?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '500', 'content-type': 'text/plain', - }, '') + }, + '') self.assertRaises(ConnectionError, conn.api_request, 'GET', '/') def test_get_all_buckets_empty(self): @@ -336,10 +351,11 @@ def test_get_all_buckets_empty(self): 'storage', conn.API_VERSION, 'b?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{}') + }, + '{}') keys = conn.get_all_buckets() self.assertEqual(len(keys), 0) self.assertEqual(http._called_with['method'], 'GET') @@ -353,10 +369,11 @@ def test_get_all_buckets_non_empty(self): 'storage', conn.API_VERSION, 'b?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{"items": [{"name": "%s"}]}' % KEY) + }, + '{"items": [{"name": "%s"}]}' % KEY) keys = conn.get_all_buckets() self.assertEqual(len(keys), 1) self.assertEqual(keys[0].name, KEY) @@ -373,10 +390,11 @@ def test_get_bucket_miss(self): conn.API_VERSION, 'b', 'nonesuch?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '404', 'content-type': 'application/json', - }, '{}') + }, + '{}') self.assertRaises(NotFoundError, conn.get_bucket, 
NONESUCH) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -391,10 +409,11 @@ def test_get_bucket_hit(self): conn.API_VERSION, 'b', 'key?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{"name": "%s"}' % KEY) + }, + '{"name": "%s"}' % KEY) bucket = conn.get_bucket(KEY) self.assertTrue(isinstance(bucket, Bucket)) self.assertTrue(bucket.connection is conn) @@ -411,10 +430,11 @@ def test_lookup_miss(self): conn.API_VERSION, 'b', 'nonesuch?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '404', 'content-type': 'application/json', - }, '{}') + }, + '{}') self.assertEqual(conn.lookup(NONESUCH), None) self.assertEqual(http._called_with['method'], 'GET') self.assertEqual(http._called_with['uri'], URI) @@ -429,10 +449,11 @@ def test_lookup_hit(self): conn.API_VERSION, 'b', 'key?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{"name": "%s"}' % KEY) + }, + '{"name": "%s"}' % KEY) bucket = conn.lookup(KEY) self.assertTrue(isinstance(bucket, Bucket)) self.assertTrue(bucket.connection is conn) @@ -449,10 +470,11 @@ def test_create_bucket_ok(self): 'storage', conn.API_VERSION, 'b?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{"name": "%s"}' % KEY) + }, + '{"name": "%s"}' % KEY) bucket = conn.create_bucket(KEY) self.assertTrue(isinstance(bucket, Bucket)) self.assertTrue(bucket.connection is conn) @@ -462,15 +484,21 @@ def test_create_bucket_ok(self): def test_delete_bucket_defaults_miss(self): _deleted_keys = [] + class _Key(object): + def __init__(self, name): self._name = name + def delete(self): _deleted_keys.append(self._name) + class _Bucket(object): + def __init__(self, name): self._name = name self.path = '/b/' + name + def __iter__(self): return iter([_Key(x) for x in ('foo', 'bar')]) PROJECT = 'project' @@ -481,10 +509,12 @@ def __iter__(self): conn.API_VERSION, 'b', 'key?project=%s' % PROJECT, - ]) + ]) http = conn._http = Http({'status': '200', 'content-type': 'application/json', - }, '{}') + }, + '{}') + def _new_bucket(name): return _Bucket(name) conn.new_bucket = _new_bucket diff --git a/gcloud/storage/test_iterator.py b/gcloud/storage/test_iterator.py index 8841ddae3e46..582fbd861e91 100644 --- a/gcloud/storage/test_iterator.py +++ b/gcloud/storage/test_iterator.py @@ -25,6 +25,7 @@ def test___iter__(self): KEY2 = 'key2' ITEM1, ITEM2 = object(), object() ITEMS = {KEY1: ITEM1, KEY2: ITEM2} + def _get_items(response): for item in response.get('items', []): yield ITEMS[item['name']] @@ -73,7 +74,7 @@ def test_get_query_params_w_token(self): iterator.next_page_token = TOKEN self.assertEqual(iterator.get_query_params(), {'pageToken': TOKEN, - }) + }) def test_get_next_page_response_new_no_token_in_response(self): PATH = '/foo' @@ -211,7 +212,7 @@ def test__iter__(self): response2['content-range'] = '10-14/15' connection = _Connection((response1, '0123456789'), (response2, '01234'), - ) + ) key = _Key(connection) iterator = self._makeOne(key) chunks = list(iterator) @@ -250,7 +251,7 @@ def test_has_more_data_invalid(self): connection = _Connection() key = _Key(connection) iterator = self._makeOne(key) - iterator._bytes_written = 10 # no _total_bytes + iterator._bytes_written = 10 # no _total_bytes. 
self.assertRaises(ValueError, iterator.has_more_data) def test_has_more_data_true(self): @@ -356,10 +357,13 @@ class _Response(dict): def status(self): return self['status'] + class _Connection(object): + def __init__(self, *responses): self._responses = responses self._requested = [] + def make_request(self, **kw): from gcloud.storage.exceptions import NotFoundError self._requested.append(kw) @@ -369,6 +373,7 @@ def make_request(self, **kw): raise NotFoundError('miss', None) else: return response + def api_request(self, **kw): from gcloud.storage.exceptions import NotFoundError self._requested.append(kw) @@ -378,19 +383,24 @@ def api_request(self, **kw): raise NotFoundError('miss', None) else: return response + def build_api_url(self, path, query_params=None): from urllib import urlencode from urlparse import urlunsplit qs = urlencode(query_params or {}) return urlunsplit(('http', 'example.com', path, qs, '')) + class _Bucket(object): path = '/b/name' + def __init__(self, connection): self.connection = connection + class _Key(object): CHUNK_SIZE = 10 path = '/b/name/o/key' + def __init__(self, connection): self.connection = connection diff --git a/gcloud/storage/test_key.py b/gcloud/storage/test_key.py index 28db03c400f3..9c8e773c815b 100644 --- a/gcloud/storage/test_key.py +++ b/gcloud/storage/test_key.py @@ -77,7 +77,8 @@ def test_public_url(self): bucket = _Bucket(connection) key = self._makeOne(bucket, KEY) self.assertEqual(key.public_url, - 'http://commondatastorage.googleapis.com/name/%s' % KEY) + 'http://commondatastorage.googleapis.com/name/%s' % + KEY) def test_exists_miss(self): NONESUCH = 'nonesuch' @@ -116,7 +117,7 @@ def test_get_contents_to_file(self): with _Monkey(MUT, KeyDataIterator=lambda self: iter(_CHUNKS)): key.get_contents_to_file(fh) self.assertEqual(fh.getvalue(), ''.join(_CHUNKS)) - + def test_get_contents_to_filename(self): from tempfile import NamedTemporaryFile from StringIO import StringIO @@ -161,7 +162,7 @@ def test_set_contents_from_file(self): connection = _Connection((loc_response, ''), (chunk1_response, ''), (chunk2_response, ''), - ) + ) bucket = _Bucket(connection) key = self._makeOne(bucket, KEY) key.CHUNK_SIZE = 5 @@ -206,7 +207,7 @@ def test_set_contents_from_filename(self): connection = _Connection((loc_response, ''), (chunk1_response, ''), (chunk2_response, ''), - ) + ) bucket = _Bucket(connection) key = self._makeOne(bucket, KEY) key.CHUNK_SIZE = 5 @@ -250,7 +251,7 @@ def test_set_contents_from_string(self): connection = _Connection((loc_response, ''), (chunk1_response, ''), (chunk2_response, ''), - ) + ) bucket = _Bucket(connection) key = self._makeOne(bucket, KEY) key.CHUNK_SIZE = 5 @@ -560,11 +561,14 @@ class _Response(dict): def status(self): return self.get('status', 200) + class _Connection(object): API_BASE_URL = 'http://example.com' + def __init__(self, *responses): self._responses = responses self._requested = [] + def make_request(self, **kw): from gcloud.storage.exceptions import NotFoundError self._requested.append(kw) @@ -574,6 +578,7 @@ def make_request(self, **kw): raise NotFoundError('miss', None) else: return response + def api_request(self, **kw): from gcloud.storage.exceptions import NotFoundError self._requested.append(kw) @@ -583,7 +588,9 @@ def api_request(self, **kw): raise NotFoundError('miss', None) else: return response - def build_api_url(self, path, query_params=None, api_base_url=API_BASE_URL): + + def build_api_url(self, path, query_params=None, + api_base_url=API_BASE_URL): from urllib import urlencode from urlparse 
import urlsplit from urlparse import urlunsplit @@ -591,13 +598,17 @@ def build_api_url(self, path, query_params=None, api_base_url=API_BASE_URL): scheme, netloc, _, _, _ = urlsplit(api_base_url) return urlunsplit((scheme, netloc, path, qs, '')) + class _Bucket(object): path = '/b/name' name = 'name' + def __init__(self, connection): self.connection = connection self._keys = {} + def get_key(self, key): - return self._keys.get(key) #XXX s.b. 'key.name'? + return self._keys.get(key) # XXX s.b. 'key.name'? + def delete_key(self, key): - del self._keys[key.name] #XXX s.b. 'key'? + del self._keys[key.name] # XXX s.b. 'key'? diff --git a/gcloud/test_connection.py b/gcloud/test_connection.py index 473da1799fee..1e0ebb8f690f 100644 --- a/gcloud/test_connection.py +++ b/gcloud/test_connection.py @@ -1,5 +1,6 @@ import unittest2 + class TestConnection(unittest2.TestCase): def _getTargetClass(self): @@ -30,8 +31,11 @@ def test_http_wo_creds(self): def test_http_w_creds(self): from httplib2 import Http + authorized = object() + class Creds(object): + def authorize(self, http): self._called_with = http return authorized diff --git a/gcloud/test_credentials.py b/gcloud/test_credentials.py index d9cb47be9ccf..69a06ec4ef0d 100644 --- a/gcloud/test_credentials.py +++ b/gcloud/test_credentials.py @@ -1,5 +1,6 @@ import unittest2 + class TestCredentials(unittest2.TestCase): def _getTargetClass(self): @@ -23,7 +24,7 @@ def test_get_for_service_account_wo_scope(self): {'service_account_name': CLIENT_EMAIL, 'private_key': PRIVATE_KEY, 'scope': None, - }) + }) def test_get_for_service_account_w_scope(self): from tempfile import NamedTemporaryFile @@ -37,23 +38,29 @@ def test_get_for_service_account_w_scope(self): with NamedTemporaryFile() as f: f.write(PRIVATE_KEY) f.flush() - found = cls.get_for_service_account(CLIENT_EMAIL, f.name, SCOPE) + found = cls.get_for_service_account(CLIENT_EMAIL, f.name, + SCOPE) self.assertTrue(found is client._signed) self.assertEqual(client._called_with, {'service_account_name': CLIENT_EMAIL, 'private_key': PRIVATE_KEY, 'scope': SCOPE, - }) + }) + class _Client(object): + def __init__(self): self._signed = object() + def SignedJwtAssertionCredentials(self, **kw): self._called_with = kw return self._signed + class _Monkey(object): # context-manager for replacing module names in the scope of a test. + def __init__(self, module, **kw): self.module = module self.to_restore = dict([(key, getattr(module, key)) for key in kw])
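The tests above lean on a small _Monkey helper, described in the final hunk as a "context-manager for replacing module names in the scope of a test." A minimal sketch of that pattern follows, assuming the usual save/replace/restore behavior; the __enter__/__exit__ bodies and the math.pi usage are illustrative assumptions, not the repository's exact code:

    # Sketch (assumption): a monkey-patching context manager in the style of
    # the _Monkey helper used by these tests.  It records the named module
    # attributes, replaces them for the duration of a `with` block, and
    # restores the originals on exit, even if the block raises.
    class _Monkey(object):

        def __init__(self, module, **kw):
            self.module = module
            # Remember the original attributes so they can be restored later.
            self.to_restore = dict([(key, getattr(module, key)) for key in kw])
            for key, value in kw.items():
                setattr(module, key, value)

        def __enter__(self):
            return self

        def __exit__(self, *exc_info):
            # Put the original attributes back.
            for key, value in self.to_restore.items():
                setattr(self.module, key, value)

    # Hypothetical usage: temporarily replace math.pi inside a test.
    import math

    with _Monkey(math, pi=3):
        assert math.pi == 3
    assert math.pi != 3  # restored after the block

The key tests use the same idea, e.g. `with _Monkey(MUT, KeyDataIterator=lambda self: iter(_CHUNKS)):` in test_key.py, to swap a module-level collaborator for a stub while exercising get_contents_to_file.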