Ditching :class: reference for builtins.
Making a best attempt at being consistent (string, not str;
boolean, not bool; and so on).

Also adding a block for `id_or_name` in Key.completed_key.
dhermes committed Jan 8, 2015
1 parent 48bcddc commit 8b950cf
Showing 14 changed files with 54 additions and 51 deletions.
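For reference, this is the convention the commit adopts, shown on a hypothetical helper (the function below is illustrative only and is not part of the commit): built-in types are written as plain words (string, boolean, integer), while project classes keep their :class: cross-references.

```python
def find_key(path, namespace=None, eventual=False):
    """Hypothetical helper illustrating the docstring style adopted here.

    :type path: tuple of string and integer
    :param path: Alternating kinds and IDs/names identifying the key.

    :type namespace: string
    :param namespace: Optional namespace to search within.

    :type eventual: boolean
    :param eventual: If True, request ``EVENTUAL`` read consistency.

    :rtype: :class:`gcloud.datastore.key.Key` or ``NoneType``
    :returns: The matching key, if one exists.
    """
```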
6 changes: 3 additions & 3 deletions gcloud/credentials.py
@@ -68,15 +68,15 @@ def get_for_service_account_p12(client_email, private_key_path, scope=None):
known explicitly and detecting the environment implicitly would be
superfluous.
- :type client_email: :class:`str`
+ :type client_email: string
:param client_email: The e-mail attached to the service account.
- :type private_key_path: :class:`str`
+ :type private_key_path: string
:param private_key_path: The path to a private key file (this file was
given to you when you created the service
account). This file must be in P12 format.
- :type scope: :class:`str` or :class:`tuple` of :class:`str`
+ :type scope: string or tuple of string
:param scope: The scope against which to authenticate. (Different services
require different scopes, check the documentation for which
scope is required for the different levels of access to any
12 changes: 6 additions & 6 deletions gcloud/datastore/__init__.py
@@ -74,7 +74,7 @@ def set_default_dataset_id(dataset_id=None):
Local environment variable used is:
- GCLOUD_DATASET_ID
- :type dataset_id: :class:`str`.
+ :type dataset_id: string
:param dataset_id: Optional. The dataset ID to use as default.
"""
if dataset_id is None:
@@ -120,10 +120,10 @@ def get_connection():
def _require_dataset_id(dataset_id=None):
"""Infer a dataset ID from the environment, if not passed explicitly.
- :type dataset_id: :class:`str`.
+ :type dataset_id: string
:param dataset_id: Optional.
- :rtype: :class:`str`
+ :rtype: string
:returns: A dataset ID based on the current environment.
:raises: :class:`EnvironmentError` if ``dataset_id`` is ``None``,
and cannot be inferred from the environment.
@@ -173,7 +173,7 @@ def get_entities(keys, missing=None, deferred=None,
:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: Optional. The connection used to connect to datastore.
- :type dataset_id: :class:`str`.
+ :type dataset_id: string
:param dataset_id: Optional. The ID of the dataset.
:rtype: list of :class:`gcloud.datastore.entity.Entity`
@@ -211,13 +211,13 @@ def allocate_ids(incomplete_key, num_ids, connection=None, dataset_id=None):
:type incomplete_key: A :class:`gcloud.datastore.key.Key`
:param incomplete_key: Partial key to use as base for allocated IDs.
- :type num_ids: :class:`int`.
+ :type num_ids: integer
:param num_ids: The number of IDs to allocate.
:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: Optional. The connection used to connect to datastore.
- :type dataset_id: :class:`str`.
+ :type dataset_id: string
:param dataset_id: Optional. The ID of the dataset.
:rtype: list of :class:`gcloud.datastore.key.Key`
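A rough usage sketch of the module-level allocate_ids documented above, assuming the default dataset ID is taken from the GCLOUD_DATASET_ID environment variable; the kind name and count are invented:

```python
from gcloud import datastore
from gcloud.datastore.key import Key

# Assumption: GCLOUD_DATASET_ID is set, so the default dataset ID can be inferred.
datastore.set_default_dataset_id()

# A partial key (kind only, no ID/name) serves as the base for allocation.
incomplete = Key('Person')

# Reserve 10 IDs; each returned Key is a completed copy of the partial key.
keys = datastore.allocate_ids(incomplete, 10)
```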
12 changes: 6 additions & 6 deletions gcloud/datastore/connection.py
@@ -198,7 +198,7 @@ def lookup(self, dataset_id, key_pbs,
by the backend as "deferred" will be copied into it.
Use only as a keyword param.
- :type eventual: bool
+ :type eventual: boolean
:param eventual: If False (the default), request ``STRONG`` read
consistency. If True, request ``EVENTUAL`` read
consistency. If the connection has a current
@@ -294,7 +294,7 @@ def run_query(self, dataset_id, query_pb, namespace=None, eventual=False):
:type namespace: string
:param namespace: The namespace over which to run the query.
- :type eventual: bool
+ :type eventual: boolean
:param eventual: If False (the default), request ``STRONG`` read
consistency. If True, request ``EVENTUAL`` read
consistency. If the connection has a current
@@ -324,7 +324,7 @@ def begin_transaction(self, dataset_id, serializable=False):
:type dataset_id: string
:param dataset_id: The ID dataset to which the transaction applies.
- :type serializable: :class:`bool`
+ :type serializable: boolean
:param serializable: Boolean indicating if the isolation level of the
transaction should be SERIALIZABLE (True) or
SNAPSHOT (False).
@@ -438,10 +438,10 @@ def save_entity(self, dataset_id, key_pb, properties,
:type properties: dict
:param properties: The properties to store on the entity.
- :type exclude_from_indexes: sequence of str
+ :type exclude_from_indexes: sequence of string
:param exclude_from_indexes: Names of properties *not* to be indexed.
- :rtype: :class:`tuple`
+ :rtype: tuple
:returns: The pair (``assigned``, ``new_id``) where ``assigned`` is a
boolean indicating if a new ID has been assigned and
``new_id`` is either ``None`` or an integer that has been
@@ -508,7 +508,7 @@ def delete_entities(self, dataset_id, key_pbs):
:type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param key_pbs: The keys to delete from the datastore.
- :rtype: :class:`bool`
+ :rtype: boolean
:returns: ``True``
"""
mutation = self.mutation()
2 changes: 1 addition & 1 deletion gcloud/datastore/entity.py
@@ -73,7 +73,7 @@ class Entity(dict):
:param key: Optional key to be set on entity. Required for :meth:`save()`
or :meth:`reload()`.
- :type exclude_from_indexes: :class:`tuple` of :class:`str`
+ :type exclude_from_indexes: tuple of string
:param exclude_from_indexes: Names of fields whose values are not to be
indexed for this entity.
"""
2 changes: 1 addition & 1 deletion gcloud/datastore/helpers.py
@@ -230,7 +230,7 @@ def _set_protobuf_value(value_pb, val):
:type value_pb: :class:`gcloud.datastore.datastore_v1_pb2.Value`
:param value_pb: The value protobuf to which the value is being assigned.
- :type val: `datetime.datetime`, bool, float, integer, string
+ :type val: `datetime.datetime`, boolean, float, integer, string,
:class:`gcloud.datastore.key.Key`,
:class:`gcloud.datastore.entity.Entity`,
:param val: The value to be assigned.
27 changes: 15 additions & 12 deletions gcloud/datastore/key.py
@@ -50,15 +50,15 @@ class Key(object):
def __init__(self, *path_args, **kwargs):
"""Constructor / initializer for a key.
- :type path_args: :class:`tuple` of :class:`str` and :class:`int`
+ :type path_args: tuple of string and integer
:param path_args: May represent a partial (odd length) or full (even
length) key path.
- :type namespace: :class:`str`
+ :type namespace: string
:param namespace: A namespace identifier for the key. Can only be
passed as a keyword argument.
- :type dataset_id: :class:`str`
+ :type dataset_id: string
:param dataset_id: The dataset ID associated with the key. Required,
unless the implicit dataset ID has been set. Can
only be passed as a keyword argument.
@@ -80,7 +80,7 @@ def __init__(self, *path_args, **kwargs):
def _parse_path(path_args):
"""Parses positional arguments into key path with kinds and IDs.
- :type path_args: :class:`tuple`
+ :type path_args: tuple
:param path_args: A tuple from positional arguments. Should be
alternating list of kinds (string) and ID/name
parts (int or string).
@@ -168,6 +168,9 @@ def _clone(self):
def completed_key(self, id_or_name):
"""Creates new key from existing partial key by adding final ID/name.
+ :type id_or_name: string or integer
+ :param id_or_name: ID or name to be added to the key.
:rtype: :class:`gcloud.datastore.key.Key`
:returns: A new ``Key`` instance with the same data as the current one
and an extra ID or name added.
@@ -255,7 +258,7 @@ def delete(self, connection=None):
def is_partial(self):
"""Boolean indicating if the key has an ID (or name).
- :rtype: :class:`bool`
+ :rtype: boolean
:returns: ``True`` if the last element of the key's path does not have
an ``id`` or a ``name``.
"""
@@ -265,7 +268,7 @@ def is_partial(self):
def namespace(self):
"""Namespace getter.
- :rtype: :class:`str`
+ :rtype: string
:returns: The namespace of the current key.
"""
return self._namespace
@@ -285,7 +288,7 @@ def path(self):
def flat_path(self):
"""Getter for the key path as a tuple.
- :rtype: :class:`tuple` of :class:`str` and :class:`int`
+ :rtype: tuple of string and integer
:returns: The tuple of elements in the path.
"""
return self._flat_path
@@ -294,7 +297,7 @@ def flat_path(self):
def kind(self):
"""Kind getter. Based on the last element of path.
- :rtype: :class:`str`
+ :rtype: string
:returns: The kind of the current key.
"""
return self.path[-1]['kind']
@@ -303,7 +306,7 @@ def kind(self):
def id(self):
"""ID getter. Based on the last element of path.
- :rtype: :class:`int`
+ :rtype: integer
:returns: The (integer) ID of the key.
"""
return self.path[-1].get('id')
@@ -312,7 +315,7 @@ def id(self):
def name(self):
"""Name getter. Based on the last element of path.
- :rtype: :class:`str`
+ :rtype: string
:returns: The (string) name of the key.
"""
return self.path[-1].get('name')
@@ -321,7 +324,7 @@ def name(self):
def id_or_name(self):
"""Getter. Based on the last element of path.
- :rtype: :class:`int` (if ``id``) or :class:`str` (if ``name``)
+ :rtype: integer (if ``id``) or string (if ``name``)
:returns: The last element of the key's path if it is either an ``id``
or a ``name``.
"""
@@ -331,7 +334,7 @@ def id_or_name(self):
def dataset_id(self):
"""Dataset ID getter.
- :rtype: :class:`str`
+ :rtype: string
:returns: The key's dataset ID.
"""
return self._dataset_id
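A minimal sketch of the partial-key flow that the new id_or_name block documents; the kind, dataset ID, and ID value below are invented:

```python
from gcloud.datastore.key import Key

partial = Key('Person', dataset_id='my-dataset')  # odd-length path: no ID or name yet
assert partial.is_partial                         # last path element lacks 'id'/'name'

complete = partial.completed_key(1234)            # accepts an integer ID or a string name
assert not complete.is_partial
assert complete.id_or_name == 1234                # surfaced via the id_or_name property
```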
16 changes: 8 additions & 8 deletions gcloud/datastore/query.py
@@ -28,31 +28,31 @@ class Query(object):
This class serves as an abstraction for creating a query over data
stored in the Cloud Datastore.
- :type kind: string.
+ :type kind: string
:param kind: The kind to query.
- :type dataset_id: str
+ :type dataset_id: string
:param dataset_id: The ID of the dataset to query. If not passed,
uses the implicit default.
- :type namespace: string or None.
+ :type namespace: string or None
:param namespace: The namespace to which to restrict results.
- :type ancestor: :class:`gcloud.datastore.key.Key` or None.
+ :type ancestor: :class:`gcloud.datastore.key.Key` or None
:param ancestor: key of the ancestor to which this query's results are
restricted.
- :type filters: sequence of (property_name, operator, value) tuples.
+ :type filters: sequence of (property_name, operator, value) tuples
:param filters: property filters applied by this query.
- :type projection: sequence of string.
+ :type projection: sequence of string
:param projection: fields returned as part of query results.
- :type order: sequence of string.
+ :type order: sequence of string
:param order: field names used to order query results. Prepend '-'
to a field name to sort it in descending order.
- :type group_by: sequence_of_string.
+ :type group_by: sequence of string
:param group_by: field names used to group query results.
:raises: ValueError if ``dataset_id`` is not passed and no implicit
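A hypothetical construction of the Query class documented above; the kind, namespace, filter, and ordering values are invented, and the '=' operator token is an assumption:

```python
from gcloud.datastore.query import Query

query = Query(
    kind='Person',
    dataset_id='my-dataset',
    namespace='staff',
    filters=[('name', '=', 'Fred')],  # (property_name, operator, value) tuples
    projection=['name'],
    order=['-name'],                  # '-' prefix sorts a field in descending order
)
```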
4 changes: 2 additions & 2 deletions gcloud/datastore/transaction.py
@@ -112,7 +112,7 @@ class Transaction(object):
Technically, it looks like the Protobuf API supports this type of
pattern, however it makes the code particularly messy.
- :type dataset_id: :class:`str`.
+ :type dataset_id: string
:param dataset_id: The ID of the dataset.
:type connection: :class:`gcloud.datastore.connection.Connection`
@@ -138,7 +138,7 @@ def __init__(self, dataset_id=None, connection=None):
def dataset_id(self):
"""Getter for dataset ID in which the transaction will run.
- :rtype: :class:`str`
+ :rtype: string
:returns: The dataset ID in which the transaction will run.
"""
return self._dataset_id
2 changes: 1 addition & 1 deletion gcloud/storage/acl.py
@@ -242,7 +242,7 @@ def has_entity(self, entity):
:type entity: :class:`_ACLEntity`
:param entity: The entity to check for existence in this ACL.
- :rtype: bool
+ :rtype: boolean
:returns: True of the entity exists in the ACL.
"""
self._ensure_loaded()
8 changes: 4 additions & 4 deletions gcloud/storage/bucket.py
@@ -256,7 +256,7 @@ def delete(self, force=False):
parameter set to true. This will iterate through the bucket's
keys and delete the related objects, before deleting the bucket.
- :type force: bool
+ :type force: boolean
:param full: If True, empties the bucket's objects then deletes it.
:raises: :class:`gcloud.storage.exceptions.NotFound` if the
@@ -636,7 +636,7 @@ def versioning_enabled(self, value):
See: https://cloud.google.com/storage/docs/object-versioning for
details.
- :type value: convertible to bool
+ :type value: convertible to boolean
:param value: should versioning be anabled for the bucket?
"""
self._patch_properties({'versioning': {'enabled': bool(value)}})
@@ -694,11 +694,11 @@ def disable_website(self):
def make_public(self, recursive=False, future=False):
"""Make a bucket public.
- :type recursive: bool
+ :type recursive: boolean
:param recursive: If True, this will make all keys inside the bucket
public as well.
- :type future: bool
+ :type future: boolean
:param future: If True, this will make all objects created in the
future public as well.
"""
6 changes: 3 additions & 3 deletions gcloud/storage/connection.py
@@ -302,7 +302,7 @@ def api_request(self, method, path, query_params=None,
latest API version supported by
gcloud-python.
- :type expect_json: bool
+ :type expect_json: boolean
:param expect_json: If True, this method will try to parse the
response as JSON and raise an exception if
that cannot be done. Default is True.
@@ -464,10 +464,10 @@ def delete_bucket(self, bucket, force=False):
:type bucket: string or :class:`gcloud.storage.bucket.Bucket`
:param bucket: The bucket name (or bucket object) to create.
- :type force: bool
+ :type force: boolean
:param full: If True, empties the bucket's objects then deletes it.
- :rtype: bool
+ :rtype: boolean
:returns: True if the bucket was deleted.
:raises: :class:`gcloud.storage.exceptions.NotFound` if the
bucket doesn't exist, or
2 changes: 1 addition & 1 deletion gcloud/storage/iterator.py
@@ -78,7 +78,7 @@ def __iter__(self):
def has_next_page(self):
"""Determines whether or not this iterator has more pages.
- :rtype: bool
+ :rtype: boolean
:returns: Whether the iterator has more pages or not.
"""
if self.page_number == 0:
4 changes: 2 additions & 2 deletions gcloud/storage/key.py
@@ -178,7 +178,7 @@ def generate_signed_url(self, expiration, method='GET'):
def exists(self):
"""Determines whether or not this key exists.
- :rtype: bool
+ :rtype: boolean
:returns: True if the key exists in Cloud Storage.
"""
return self.bucket.get_key(self.name) is not None
@@ -289,7 +289,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
:type file_obj: file
:param file_obj: A file handle open for reading.
- :type rewind: bool
+ :type rewind: boolean
:param rewind: If True, seek to the beginning of the file handle before
writing the file to Cloud Storage.
2 changes: 1 addition & 1 deletion run_pylint.py
@@ -100,7 +100,7 @@ def valid_filename(filename):
def is_production_filename(filename):
"""Checks if the file contains production code.
- :rtype: `bool`
+ :rtype: boolean
:returns: Boolean indicating production status.
"""
return not ('demo' in filename or 'test' in filename
