From 75ea78851523a1eb6baa2b0f3a175cd2007228d6 Mon Sep 17 00:00:00 2001
From: Jonathan Amsterdam
Date: Thu, 14 Sep 2017 14:43:08 -0400
Subject: [PATCH] bigquery: rename name field of Dataset to dataset_id (#3955)

* bigquery: rename name field of Dataset to dataset_id

Rename the former dataset_id property to full_dataset_id. Also rename
Table.dataset_name to Table.dataset_id. Perform other renamings (of
various variables and constants). These names match usage better.

The API's Dataset.id field is "project:dataset_id", which is confusing
and basically useless, so it's a mistake to call that dataset_id.

* fix long line

* fix long line
---
 bigquery/google/cloud/bigquery/dataset.py |  25 ++--
 bigquery/google/cloud/bigquery/job.py     |  12 +-
 bigquery/google/cloud/bigquery/query.py   |   2 +-
 bigquery/google/cloud/bigquery/table.py   |  14 +--
 bigquery/tests/system.py                  |  70 +++++------
 bigquery/tests/unit/test_client.py        |   2 +-
 bigquery/tests/unit/test_dataset.py       | 144 +++++++++++-----------
 bigquery/tests/unit/test_job.py           |  96 +++++++--------
 bigquery/tests/unit/test_table.py         | 105 ++++++++--------
 9 files changed, 236 insertions(+), 234 deletions(-)

diff --git a/bigquery/google/cloud/bigquery/dataset.py b/bigquery/google/cloud/bigquery/dataset.py
index e31b4a2a93b1..cd31f737e693 100644
--- a/bigquery/google/cloud/bigquery/dataset.py
+++ b/bigquery/google/cloud/bigquery/dataset.py
@@ -142,8 +142,8 @@ class Dataset(object):
     See
     https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets

-    :type name: str
-    :param name: the name of the dataset
+    :type dataset_id: str
+    :param dataset_id: the ID of the dataset

     :type client: :class:`google.cloud.bigquery.client.Client`
     :param client: A client which holds credentials and project configuration
@@ -159,8 +159,8 @@

     _access_entries = None

-    def __init__(self, name, client, access_entries=(), project=None):
-        self.name = name
+    def __init__(self, dataset_id, client, access_entries=(), project=None):
+        self.dataset_id = dataset_id
         self._client = client
         self._properties = {}
         # Let the @property do validation.
@@ -181,9 +181,9 @@ def path(self):
         """URL path for the dataset's APIs.

         :rtype: str
-        :returns: the path based on project and dataste name.
+        :returns: the path based on project and dataset ID.
         """
-        return '/projects/%s/datasets/%s' % (self.project, self.name)
+        return '/projects/%s/datasets/%s' % (self.project, self.dataset_id)

     @property
     def access_entries(self):
@@ -221,8 +221,8 @@ def created(self):
         return _datetime_from_microseconds(1000.0 * creation_time)

     @property
-    def dataset_id(self):
-        """ID for the dataset resource.
+    def full_dataset_id(self):
+        """ID for the dataset resource, in the form "project_id:dataset_id".

         :rtype: str, or ``NoneType``
         :returns: the ID (None until set from the server).
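Taken together, the renames above play out for client code roughly as in
the following sketch (`client`, 'my-project', 'my_dataset', and 'my_table'
are illustrative placeholders, not identifiers from this patch):

    from google.cloud.bigquery.dataset import Dataset

    dataset = Dataset('my_dataset', client)
    dataset.dataset_id       # 'my_dataset' -- was dataset.name
    dataset.full_dataset_id  # 'my-project:my_dataset' once the server sets
                             # it (None before then) -- was dataset.dataset_id

    table = dataset.table('my_table')
    table.dataset_id         # 'my_dataset' -- was table.dataset_name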
@@ -365,8 +365,8 @@ def from_api_repr(cls, resource, client):
                 'datasetId' not in resource['datasetReference']):
             raise KeyError('Resource lacks required identity information:'
                            '["datasetReference"]["datasetId"]')
-        name = resource['datasetReference']['datasetId']
-        dataset = cls(name, client=client)
+        dataset_id = resource['datasetReference']['datasetId']
+        dataset = cls(dataset_id, client=client)
         dataset._set_properties(resource)
         return dataset

@@ -444,7 +444,7 @@ def _build_resource(self):
         """Generate a resource for ``create`` or ``update``."""
         resource = {
             'datasetReference': {
-                'projectId': self.project, 'datasetId': self.name},
+                'projectId': self.project, 'datasetId': self.dataset_id},
         }
         if self.default_table_expiration_ms is not None:
             value = self.default_table_expiration_ms
@@ -610,7 +610,8 @@ def list_tables(self, max_results=None, page_token=None):
         :returns: Iterator of :class:`~google.cloud.bigquery.table.Table`
                   contained within the current dataset.
         """
-        path = '/projects/%s/datasets/%s/tables' % (self.project, self.name)
+        path = '/projects/%s/datasets/%s/tables' % (
+            self.project, self.dataset_id)
         result = page_iterator.HTTPIterator(
             client=self._client,
             api_request=self._client._connection.api_request,
diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py
index 6f5c2c294a0c..f513a98d23cd 100644
--- a/bigquery/google/cloud/bigquery/job.py
+++ b/bigquery/google/cloud/bigquery/job.py
@@ -773,7 +773,7 @@ def _build_resource(self):
                 'sourceUris': self.source_uris,
                 'destinationTable': {
                     'projectId': self.destination.project,
-                    'datasetId': self.destination.dataset_name,
+                    'datasetId': self.destination.dataset_id,
                     'tableId': self.destination.name,
                 },
             },
@@ -900,7 +900,7 @@ def _build_resource(self):
         source_refs = [{
             'projectId': table.project,
-            'datasetId': table.dataset_name,
+            'datasetId': table.dataset_id,
             'tableId': table.name,
         } for table in self.sources]

@@ -914,7 +914,7 @@ def _build_resource(self):
                 'sourceTables': source_refs,
                 'destinationTable': {
                     'projectId': self.destination.project,
-                    'datasetId': self.destination.dataset_name,
+                    'datasetId': self.destination.dataset_id,
                     'tableId': self.destination.name,
                 },
             },
@@ -1058,7 +1058,7 @@ def _build_resource(self):
         source_ref = {
             'projectId': self.source.project,
-            'datasetId': self.source.dataset_name,
+            'datasetId': self.source.dataset_id,
             'tableId': self.source.name,
         }

@@ -1247,7 +1247,7 @@ def _destination_table_resource(self):
         if self.destination is not None:
             return {
                 'projectId': self.destination.project,
-                'datasetId': self.destination.dataset_name,
+                'datasetId': self.destination.dataset_id,
                 'tableId': self.destination.name,
             }

@@ -1271,7 +1271,7 @@ def _populate_config_resource(self, configuration):
         if self.default_dataset is not None:
             configuration['defaultDataset'] = {
                 'projectId': self.default_dataset.project,
-                'datasetId': self.default_dataset.name,
+                'datasetId': self.default_dataset.dataset_id,
             }
         if self.destination is not None:
             table_res = self._destination_table_resource()
diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py
index 185b68deb104..fa03d373674d 100644
--- a/bigquery/google/cloud/bigquery/query.py
+++ b/bigquery/google/cloud/bigquery/query.py
@@ -329,7 +329,7 @@ def _build_resource(self):
         if self.default_dataset is not None:
             resource['defaultDataset'] = {
                 'projectId': self.project,
-                'datasetId': self.default_dataset.name,
+                'datasetId': self.default_dataset.dataset_id,
             }

         if self.max_results is not None:
diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py
index 69d99ab4450f..e06e79271d0a 100644
--- a/bigquery/google/cloud/bigquery/table.py
+++ b/bigquery/google/cloud/bigquery/table.py
@@ -119,13 +119,13 @@ def project(self):
         return self._dataset.project

     @property
-    def dataset_name(self):
-        """Name of dataset containing the table.
+    def dataset_id(self):
+        """ID of dataset containing the table.

         :rtype: str
         :returns: the ID (derived from the dataset).
         """
-        return self._dataset.name
+        return self._dataset.dataset_id

     @property
     def path(self):
@@ -463,7 +463,7 @@ def list_partitions(self, client=None):
         """
         query = self._require_client(client).run_sync_query(
             'SELECT partition_id from [%s.%s$__PARTITIONS_SUMMARY__]' %
-            (self.dataset_name, self.name))
+            (self.dataset_id, self.name))
         query.run()
         return [row[0] for row in query.rows]

@@ -527,7 +527,7 @@ def _build_resource(self):
         resource = {
             'tableReference': {
                 'projectId': self._dataset.project,
-                'datasetId': self._dataset.name,
+                'datasetId': self._dataset.dataset_id,
                 'tableId': self.name},
         }
         if self.description is not None:
@@ -572,7 +572,7 @@ def create(self, client=None):
         """
         client = self._require_client(client)
         path = '/projects/%s/datasets/%s/tables' % (
-            self._dataset.project, self._dataset.name)
+            self._dataset.project, self._dataset.dataset_id)
         api_response = client._connection.api_request(
             method='POST', path=path, data=self._build_resource())
         self._set_properties(api_response)
@@ -1369,7 +1369,7 @@ def _get_upload_metadata(source_format, schema, dataset, name):
         'sourceFormat': source_format,
         'destinationTable': {
             'projectId': dataset.project,
-            'datasetId': dataset.name,
+            'datasetId': dataset.dataset_id,
             'tableId': name,
         },
     }
diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py
index 3f1817706672..ad93ac2c954e 100644
--- a/bigquery/tests/system.py
+++ b/bigquery/tests/system.py
@@ -44,7 +44,7 @@ def _has_rows(result):
     return len(result) > 0


-def _make_dataset_name(prefix):
+def _make_dataset_id(prefix):
     return '%s%s' % (prefix, unique_resource_id())


@@ -111,32 +111,32 @@ def _still_in_use(bad_request):
             doomed.delete()

     def test_create_dataset(self):
-        DATASET_NAME = _make_dataset_name('create_dataset')
-        dataset = Dataset(DATASET_NAME, Config.CLIENT)
+        DATASET_ID = _make_dataset_id('create_dataset')
+        dataset = Dataset(DATASET_ID, Config.CLIENT)
         self.assertFalse(dataset.exists())

         retry_403(dataset.create)()
         self.to_delete.append(dataset)

         self.assertTrue(dataset.exists())
-        self.assertEqual(dataset.name, DATASET_NAME)
+        self.assertEqual(dataset.dataset_id, DATASET_ID)

     def test_reload_dataset(self):
-        DATASET_NAME = _make_dataset_name('reload_dataset')
-        dataset = Dataset(DATASET_NAME, Config.CLIENT)
+        DATASET_ID = _make_dataset_id('reload_dataset')
+        dataset = Dataset(DATASET_ID, Config.CLIENT)
         dataset.friendly_name = 'Friendly'
         dataset.description = 'Description'

         retry_403(dataset.create)()
         self.to_delete.append(dataset)

-        other = Dataset(DATASET_NAME, Config.CLIENT)
+        other = Dataset(DATASET_ID, Config.CLIENT)
         other.reload()
         self.assertEqual(other.friendly_name, 'Friendly')
         self.assertEqual(other.description, 'Description')

     def test_patch_dataset(self):
-        dataset = Dataset(_make_dataset_name('patch_dataset'), Config.CLIENT)
+        dataset = Dataset(_make_dataset_id('patch_dataset'), Config.CLIENT)
         self.assertFalse(dataset.exists())

         retry_403(dataset.create)()
@@ -150,7 +150,7 @@ def test_patch_dataset(self):
         self.assertEqual(dataset.description, 'Description')

     def test_update_dataset(self):
-        dataset = Dataset(_make_dataset_name('update_dataset'), Config.CLIENT)
+        dataset = Dataset(_make_dataset_id('update_dataset'), Config.CLIENT)
         self.assertFalse(dataset.exists())

         retry_403(dataset.create)()
@@ -175,8 +175,8 @@ def test_list_datasets(self):
             'newer' + unique_resource_id(),
             'newest' + unique_resource_id(),
         ]
-        for dataset_name in datasets_to_create:
-            created_dataset = Dataset(dataset_name, Config.CLIENT)
+        for dataset_id in datasets_to_create:
+            created_dataset = Dataset(dataset_id, Config.CLIENT)
             retry_403(created_dataset.create)()
             self.to_delete.append(created_dataset)

@@ -185,12 +185,12 @@ def test_list_datasets(self):
         all_datasets = list(iterator)
         self.assertIsNone(iterator.next_page_token)
         created = [dataset for dataset in all_datasets
-                   if dataset.name in datasets_to_create and
+                   if dataset.dataset_id in datasets_to_create and
                    dataset.project == Config.CLIENT.project]
         self.assertEqual(len(created), len(datasets_to_create))

     def test_create_table(self):
-        dataset = Dataset(_make_dataset_name('create_table'), Config.CLIENT)
+        dataset = Dataset(_make_dataset_id('create_table'), Config.CLIENT)
         self.assertFalse(dataset.exists())

         retry_403(dataset.create)()
@@ -208,8 +208,8 @@ def test_create_table(self):
         self.assertEqual(table.name, TABLE_NAME)

     def test_list_tables(self):
-        DATASET_NAME = _make_dataset_name('list_tables')
-        dataset = Dataset(DATASET_NAME, Config.CLIENT)
+        DATASET_ID = _make_dataset_id('list_tables')
+        dataset = Dataset(DATASET_ID, Config.CLIENT)
         self.assertFalse(dataset.exists())

         retry_403(dataset.create)()
@@ -241,11 +241,11 @@ def test_list_tables(self):
         self.assertIsNone(iterator.next_page_token)
         created = [table for table in all_tables
                    if (table.name in tables_to_create and
-                        table.dataset_name == DATASET_NAME)]
+                        table.dataset_id == DATASET_ID)]
         self.assertEqual(len(created), len(tables_to_create))

     def test_patch_table(self):
-        dataset = Dataset(_make_dataset_name('patch_table'), Config.CLIENT)
+        dataset = Dataset(_make_dataset_id('patch_table'), Config.CLIENT)
         self.assertFalse(dataset.exists())

         retry_403(dataset.create)()
@@ -267,7 +267,7 @@ def test_patch_table(self):
         self.assertEqual(table.description, 'Description')

     def test_update_table(self):
-        dataset = Dataset(_make_dataset_name('update_table'), Config.CLIENT)
+        dataset = Dataset(_make_dataset_id('update_table'), Config.CLIENT)
         self.assertFalse(dataset.exists())

         retry_403(dataset.create)()
@@ -311,7 +311,7 @@ def test_insert_data_then_dump_table(self):
         ]
         ROW_IDS = range(len(ROWS))
         dataset = Dataset(
-            _make_dataset_name('insert_data_then_dump'), Config.CLIENT)
+            _make_dataset_id('insert_data_then_dump'), Config.CLIENT)
         self.assertFalse(dataset.exists())

         retry_403(dataset.create)()
@@ -355,7 +355,7 @@ def test_load_table_from_local_file_then_dump_table(self):
         TABLE_NAME = 'test_table'

         dataset = Dataset(
-            _make_dataset_name('load_local_then_dump'), Config.CLIENT)
+            _make_dataset_id('load_local_then_dump'), Config.CLIENT)
         retry_403(dataset.create)()
         self.to_delete.append(dataset)

@@ -404,7 +404,7 @@ def test_load_table_from_local_avro_file_then_dump_table(self):
                 ("red", 650)]

         dataset = Dataset(
-            _make_dataset_name('load_local_then_dump'), Config.CLIENT)
+            _make_dataset_id('load_local_then_dump'), Config.CLIENT)
         retry_403(dataset.create)()
         self.to_delete.append(dataset)

@@ -468,7 +468,7 @@ def test_load_table_from_storage_then_dump_table(self):
         self.to_delete.insert(0, blob)

         dataset = Dataset(
-            _make_dataset_name('load_gcs_then_dump'), Config.CLIENT)
+            _make_dataset_id('load_gcs_then_dump'), Config.CLIENT)
         retry_403(dataset.create)()
         self.to_delete.append(dataset)

@@ -538,7 +538,7 @@ def test_load_table_from_storage_w_autodetect_schema(self):
         self.to_delete.insert(0, blob)

         dataset = Dataset(
-            _make_dataset_name('load_gcs_then_dump'), Config.CLIENT)
+            _make_dataset_id('load_gcs_then_dump'), Config.CLIENT)
         retry_403(dataset.create)()
         self.to_delete.append(dataset)

@@ -570,12 +570,12 @@ def test_load_table_from_storage_w_autodetect_schema(self):
             sorted(actual_rows, key=by_age), sorted(rows, key=by_age))

     def test_job_cancel(self):
-        DATASET_NAME = _make_dataset_name('job_cancel')
-        JOB_NAME = 'fetch_' + DATASET_NAME
+        DATASET_ID = _make_dataset_id('job_cancel')
+        JOB_NAME = 'fetch_' + DATASET_ID
         TABLE_NAME = 'test_table'
-        QUERY = 'SELECT * FROM %s.%s' % (DATASET_NAME, TABLE_NAME)
+        QUERY = 'SELECT * FROM %s.%s' % (DATASET_ID, TABLE_NAME)

-        dataset = Dataset(DATASET_NAME, Config.CLIENT)
+        dataset = Dataset(DATASET_ID, Config.CLIENT)
         retry_403(dataset.create)()
         self.to_delete.append(dataset)

@@ -796,7 +796,7 @@ def _load_table_for_dml(self, rows, dataset_name, table_name):
         self._fetch_single_page(table)

     def test_sync_query_w_dml(self):
-        dataset_name = _make_dataset_name('dml_tests')
+        dataset_name = _make_dataset_id('dml_tests')
         table_name = 'test_table'
         self._load_table_for_dml([('Hello World',)], dataset_name, table_name)
         query_template = """UPDATE {}.{}
@@ -812,7 +812,7 @@ def test_sync_query_w_dml(self):
         self.assertEqual(query.num_dml_affected_rows, 1)

     def test_dbapi_w_dml(self):
-        dataset_name = _make_dataset_name('dml_tests')
+        dataset_name = _make_dataset_id('dml_tests')
         table_name = 'test_table'
         self._load_table_for_dml([('Hello World',)], dataset_name, table_name)
         query_template = """UPDATE {}.{}
@@ -1097,10 +1097,10 @@ def test_dbapi_w_query_parameters(self):

     def test_dump_table_w_public_data(self):
         PUBLIC = 'bigquery-public-data'
-        DATASET_NAME = 'samples'
+        DATASET_ID = 'samples'
         TABLE_NAME = 'natality'

-        dataset = Dataset(DATASET_NAME, Config.CLIENT, project=PUBLIC)
+        dataset = Dataset(DATASET_ID, Config.CLIENT, project=PUBLIC)
         table = dataset.table(TABLE_NAME)
         # Reload table to get the schema before fetching the rows.
         table.reload()
@@ -1108,11 +1108,11 @@

     def test_large_query_w_public_data(self):
         PUBLIC = 'bigquery-public-data'
-        DATASET_NAME = 'samples'
+        DATASET_ID = 'samples'
         TABLE_NAME = 'natality'
         LIMIT = 1000
         SQL = 'SELECT * from `{}.{}.{}` LIMIT {}'.format(
-            PUBLIC, DATASET_NAME, TABLE_NAME, LIMIT)
+            PUBLIC, DATASET_ID, TABLE_NAME, LIMIT)

         query = Config.CLIENT.run_sync_query(SQL)
         query.use_legacy_sql = False
@@ -1154,7 +1154,7 @@ def test_insert_nested_nested(self):
         ]
         table_name = 'test_table'
         dataset = Dataset(
-            _make_dataset_name('issue_2951'), Config.CLIENT)
+            _make_dataset_id('issue_2951'), Config.CLIENT)
         retry_403(dataset.create)()
         self.to_delete.append(dataset)

@@ -1174,7 +1174,7 @@ def test_create_table_insert_fetch_nested_schema(self):

         table_name = 'test_table'
         dataset = Dataset(
-            _make_dataset_name('create_table_nested_schema'), Config.CLIENT)
+            _make_dataset_id('create_table_nested_schema'), Config.CLIENT)
         self.assertFalse(dataset.exists())

         retry_403(dataset.create)()
diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py
index 70e1f1eea7c7..fffffb9b2b25 100644
--- a/bigquery/tests/unit/test_client.py
+++ b/bigquery/tests/unit/test_client.py
@@ -210,7 +210,7 @@ def test_list_datasets_defaults(self):
         self.assertEqual(len(datasets), len(DATA['datasets']))
         for found, expected in zip(datasets, DATA['datasets']):
             self.assertIsInstance(found, Dataset)
-            self.assertEqual(found.dataset_id, expected['id'])
+            self.assertEqual(found.full_dataset_id, expected['id'])
             self.assertEqual(found.friendly_name, expected['friendlyName'])

         self.assertEqual(token, TOKEN)
diff --git a/bigquery/tests/unit/test_dataset.py b/bigquery/tests/unit/test_dataset.py
index 09fdbbe034ce..e1db93a973e4 100644
--- a/bigquery/tests/unit/test_dataset.py
+++ b/bigquery/tests/unit/test_dataset.py
@@ -110,7 +110,7 @@ def test_table(self):

 class TestDataset(unittest.TestCase):
     PROJECT = 'project'
-    DS_NAME = 'dataset-name'
+    DS_ID = 'dataset-id'

     @staticmethod
     def _get_target_class():
@@ -129,7 +129,7 @@ def _setUpConstants(self):
         self.WHEN = datetime.datetime.utcfromtimestamp(self.WHEN_TS).replace(
             tzinfo=UTC)
         self.ETAG = 'ETAG'
-        self.DS_ID = '%s:%s' % (self.PROJECT, self.DS_NAME)
+        self.DS_FULL_ID = '%s:%s' % (self.PROJECT, self.DS_ID)
         self.RESOURCE_URL = 'http://example.com/path/to/resource'

     def _makeResource(self):
@@ -139,9 +139,9 @@ def _makeResource(self):
         return {
             'creationTime': self.WHEN_TS * 1000,
             'datasetReference':
-                {'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
+                {'projectId': self.PROJECT, 'datasetId': self.DS_ID},
             'etag': self.ETAG,
-            'id': self.DS_ID,
+            'id': self.DS_FULL_ID,
             'lastModifiedTime': self.WHEN_TS * 1000,
             'location': 'US',
             'selfLink': self.RESOURCE_URL,
@@ -209,17 +209,17 @@ def _verify_resource_properties(self, dataset, resource):

     def test_ctor_defaults(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
-        self.assertEqual(dataset.name, self.DS_NAME)
+        dataset = self._make_one(self.DS_ID, client)
+        self.assertEqual(dataset.dataset_id, self.DS_ID)
         self.assertIs(dataset._client, client)
         self.assertEqual(dataset.project, client.project)
         self.assertEqual(
             dataset.path,
-            '/projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME))
+            '/projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID))
         self.assertEqual(dataset.access_entries, [])

         self.assertIsNone(dataset.created)
-        self.assertIsNone(dataset.dataset_id)
+        self.assertIsNone(dataset.full_dataset_id)
         self.assertIsNone(dataset.etag)
         self.assertIsNone(dataset.modified)
         self.assertIsNone(dataset.self_link)
@@ -237,19 +237,19 @@ def test_ctor_explicit(self):
         entries = [phred, bharney]
         OTHER_PROJECT = 'foo-bar-123'
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client,
+        dataset = self._make_one(self.DS_ID, client,
                                  access_entries=entries,
                                  project=OTHER_PROJECT)
-        self.assertEqual(dataset.name, self.DS_NAME)
+        self.assertEqual(dataset.dataset_id, self.DS_ID)
         self.assertIs(dataset._client, client)
         self.assertEqual(dataset.project, OTHER_PROJECT)
         self.assertEqual(
             dataset.path,
-            '/projects/%s/datasets/%s' % (OTHER_PROJECT, self.DS_NAME))
+            '/projects/%s/datasets/%s' % (OTHER_PROJECT, self.DS_ID))
         self.assertEqual(dataset.access_entries, entries)

         self.assertIsNone(dataset.created)
-        self.assertIsNone(dataset.dataset_id)
+        self.assertIsNone(dataset.full_dataset_id)
         self.assertIsNone(dataset.etag)
         self.assertIsNone(dataset.modified)
         self.assertIsNone(dataset.self_link)
@@ -261,7 +261,7 @@ def test_ctor_explicit(self):

     def test_access_entries_setter_non_list(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         with self.assertRaises(TypeError):
             dataset.access_entries = object()

@@ -269,7 +269,7 @@ def test_access_entries_setter_invalid_field(self):
         from google.cloud.bigquery.dataset import AccessEntry

         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         phred = AccessEntry('OWNER', 'userByEmail', 'phred@example.com')
         with self.assertRaises(ValueError):
             dataset.access_entries = [phred, object()]
@@ -278,7 +278,7 @@ def test_access_entries_setter(self):
         from google.cloud.bigquery.dataset import AccessEntry

         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         phred = AccessEntry('OWNER', 'userByEmail', 'phred@example.com')
         bharney = AccessEntry('OWNER', 'userByEmail', 'bharney@example.com')
         dataset.access_entries = [phred, bharney]
@@ -286,49 +286,49 @@ def test_access_entries_setter(self):

     def test_default_table_expiration_ms_setter_bad_value(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         with self.assertRaises(ValueError):
             dataset.default_table_expiration_ms = 'bogus'

     def test_default_table_expiration_ms_setter(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         dataset.default_table_expiration_ms = 12345
         self.assertEqual(dataset.default_table_expiration_ms, 12345)

     def test_description_setter_bad_value(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         with self.assertRaises(ValueError):
             dataset.description = 12345

     def test_description_setter(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         dataset.description = 'DESCRIPTION'
         self.assertEqual(dataset.description, 'DESCRIPTION')

     def test_friendly_name_setter_bad_value(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         with self.assertRaises(ValueError):
             dataset.friendly_name = 12345

     def test_friendly_name_setter(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         dataset.friendly_name = 'FRIENDLY'
         self.assertEqual(dataset.friendly_name, 'FRIENDLY')

     def test_location_setter_bad_value(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         with self.assertRaises(ValueError):
             dataset.location = 12345

     def test_location_setter(self):
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client)
+        dataset = self._make_one(self.DS_ID, client)
         dataset.location = 'LOCATION'
         self.assertEqual(dataset.location, 'LOCATION')

@@ -344,10 +344,10 @@ def test_from_api_repr_bare(self):
         self._setUpConstants()
         client = _Client(self.PROJECT)
         RESOURCE = {
-            'id': '%s:%s' % (self.PROJECT, self.DS_NAME),
+            'id': '%s:%s' % (self.PROJECT, self.DS_ID),
             'datasetReference': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
             }
         }
         klass = self._get_target_class()
@@ -368,7 +368,7 @@ def test__parse_access_entries_w_unknown_entity_type(self):
             {'role': 'READER', 'unknown': 'UNKNOWN'},
         ]
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)
         with self.assertRaises(ValueError):
             dataset._parse_access_entries(ACCESS)

@@ -382,7 +382,7 @@ def test__parse_access_entries_w_extra_keys(self):
             },
         ]
         client = _Client(self.PROJECT)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)
         with self.assertRaises(ValueError):
             dataset._parse_access_entries(ACCESS)

@@ -391,7 +391,7 @@ def test_create_w_bound_client(self):
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)

         dataset.create()

@@ -401,7 +401,7 @@ def test_create_w_bound_client(self):
         self.assertEqual(req['path'], '/%s' % PATH)
         SENT = {
             'datasetReference':
-                {'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
+                {'projectId': self.PROJECT, 'datasetId': self.DS_ID},
         }
         self.assertEqual(req['data'], SENT)
         self._verify_resource_properties(dataset, RESOURCE)
@@ -421,7 +421,7 @@ def test_create_w_alternate_client(self):
         CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
         conn2 = _Connection(RESOURCE)
         CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
-        dataset = self._make_one(self.DS_NAME, client=CLIENT1)
+        dataset = self._make_one(self.DS_ID, client=CLIENT1)
         dataset.friendly_name = TITLE
         dataset.description = DESCRIPTION
         VIEW = {
@@ -448,7 +448,7 @@ def test_create_w_alternate_client(self):
         SENT = {
             'datasetReference': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
             },
             'description': DESCRIPTION,
             'friendlyName': TITLE,
@@ -474,7 +474,7 @@ def test_create_w_missing_output_properties(self):
         self.WHEN = None
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)

         dataset.create()

@@ -484,16 +484,16 @@ def test_create_w_missing_output_properties(self):
         self.assertEqual(req['path'], '/%s' % PATH)
         SENT = {
             'datasetReference':
-                {'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
+                {'projectId': self.PROJECT, 'datasetId': self.DS_ID},
         }
         self.assertEqual(req['data'], SENT)
         self._verify_resource_properties(dataset, RESOURCE)

     def test_exists_miss_w_bound_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         conn = _Connection()
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)

         self.assertFalse(dataset.exists())

@@ -504,12 +504,12 @@ def test_exists_miss_w_bound_client(self):
         self.assertEqual(req['query_params'], {'fields': 'id'})

     def test_exists_hit_w_alternate_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         conn1 = _Connection()
         CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
         conn2 = _Connection({})
         CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
-        dataset = self._make_one(self.DS_NAME, client=CLIENT1)
+        dataset = self._make_one(self.DS_ID, client=CLIENT1)

         self.assertTrue(dataset.exists(client=CLIENT2))

@@ -521,11 +521,11 @@ def test_exists_hit_w_alternate_client(self):
         self.assertEqual(req['query_params'], {'fields': 'id'})

     def test_reload_w_bound_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)

         dataset.reload()

@@ -536,13 +536,13 @@ def test_reload_w_bound_client(self):
         self._verify_resource_properties(dataset, RESOURCE)

     def test_reload_w_alternate_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         RESOURCE = self._makeResource()
         conn1 = _Connection()
         CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
         conn2 = _Connection(RESOURCE)
         CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
-        dataset = self._make_one(self.DS_NAME, client=CLIENT1)
+        dataset = self._make_one(self.DS_ID, client=CLIENT1)

         dataset.reload(client=CLIENT2)

@@ -557,13 +557,13 @@ def test_patch_w_invalid_expiration(self):
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)

         with self.assertRaises(ValueError):
             dataset.patch(default_table_expiration_ms='BOGUS')

     def test_patch_w_bound_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         DESCRIPTION = 'DESCRIPTION'
         TITLE = 'TITLE'
         RESOURCE = self._makeResource()
@@ -571,7 +571,7 @@ def test_patch_w_bound_client(self):
         RESOURCE['friendlyName'] = TITLE
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)

         dataset.patch(description=DESCRIPTION, friendly_name=TITLE)

@@ -587,7 +587,7 @@ def test_patch_w_bound_client(self):
         self._verify_resource_properties(dataset, RESOURCE)

     def test_patch_w_alternate_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         DEF_TABLE_EXP = 12345
         LOCATION = 'EU'
         RESOURCE = self._makeResource()
@@ -597,7 +597,7 @@ def test_patch_w_alternate_client(self):
         CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
         conn2 = _Connection(RESOURCE)
         CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
-        dataset = self._make_one(self.DS_NAME, client=CLIENT1)
+        dataset = self._make_one(self.DS_ID, client=CLIENT1)

         dataset.patch(client=CLIENT2,
                       default_table_expiration_ms=DEF_TABLE_EXP,
@@ -616,7 +616,7 @@ def test_patch_w_alternate_client(self):
         self._verify_resource_properties(dataset, RESOURCE)

     def test_update_w_bound_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         DESCRIPTION = 'DESCRIPTION'
         TITLE = 'TITLE'
         RESOURCE = self._makeResource()
@@ -624,7 +624,7 @@ def test_update_w_bound_client(self):
         RESOURCE['friendlyName'] = TITLE
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)
         dataset.description = DESCRIPTION
         dataset.friendly_name = TITLE

@@ -635,7 +635,7 @@ def test_update_w_bound_client(self):
         self.assertEqual(req['method'], 'PUT')
         SENT = {
             'datasetReference':
-                {'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
+                {'projectId': self.PROJECT, 'datasetId': self.DS_ID},
             'description': DESCRIPTION,
             'friendlyName': TITLE,
         }
@@ -644,7 +644,7 @@ def test_update_w_bound_client(self):
         self._verify_resource_properties(dataset, RESOURCE)

     def test_update_w_alternate_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         DEF_TABLE_EXP = 12345
         LOCATION = 'EU'
         RESOURCE = self._makeResource()
@@ -654,7 +654,7 @@ def test_update_w_alternate_client(self):
         CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
         conn2 = _Connection(RESOURCE)
         CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
-        dataset = self._make_one(self.DS_NAME, client=CLIENT1)
+        dataset = self._make_one(self.DS_ID, client=CLIENT1)
         dataset.default_table_expiration_ms = DEF_TABLE_EXP
         dataset.location = LOCATION

@@ -667,7 +667,7 @@ def test_update_w_alternate_client(self):
         self.assertEqual(req['path'], '/%s' % PATH)
         SENT = {
             'datasetReference':
-                {'projectId': self.PROJECT, 'datasetId': self.DS_NAME},
+                {'projectId': self.PROJECT, 'datasetId': self.DS_ID},
             'defaultTableExpirationMs': 12345,
             'location': 'EU',
         }
@@ -675,10 +675,10 @@ def test_update_w_alternate_client(self):
         self._verify_resource_properties(dataset, RESOURCE)

     def test_delete_w_bound_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)

         dataset.delete()

@@ -688,12 +688,12 @@ def test_delete_w_bound_client(self):
         self.assertEqual(req['path'], '/%s' % PATH)

     def test_delete_w_alternate_client(self):
-        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s' % (self.PROJECT, self.DS_ID)
         conn1 = _Connection()
         CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
         conn2 = _Connection({})
         CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
-        dataset = self._make_one(self.DS_NAME, client=CLIENT1)
+        dataset = self._make_one(self.DS_ID, client=CLIENT1)

         dataset.delete(client=CLIENT2)

@@ -708,7 +708,7 @@ def test_list_tables_empty(self):
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)

-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)
         iterator = dataset.list_tables()
         self.assertIs(iterator.dataset, dataset)

@@ -721,7 +721,7 @@ def test_list_tables_empty(self):
         self.assertEqual(len(conn._requested), 1)
         req = conn._requested[0]
         self.assertEqual(req['method'], 'GET')
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
         self.assertEqual(req['path'], '/%s' % PATH)

     def test_list_tables_defaults(self):
@@ -730,21 +730,21 @@ def test_list_tables_defaults(self):
         TABLE_1 = 'table_one'
         TABLE_2 = 'table_two'
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
         TOKEN = 'TOKEN'
         DATA = {
             'nextPageToken': TOKEN,
             'tables': [
                 {'kind': 'bigquery#table',
-                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_1),
+                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, TABLE_1),
                  'tableReference': {'tableId': TABLE_1,
-                                    'datasetId': self.DS_NAME,
+                                    'datasetId': self.DS_ID,
                                     'projectId': self.PROJECT},
                  'type': 'TABLE'},
                 {'kind': 'bigquery#table',
-                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_2),
+                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, TABLE_2),
                  'tableReference': {'tableId': TABLE_2,
-                                    'datasetId': self.DS_NAME,
+                                    'datasetId': self.DS_ID,
                                     'projectId': self.PROJECT},
                  'type': 'TABLE'},
             ]
@@ -752,7 +752,7 @@ def test_list_tables_defaults(self):

         conn = _Connection(DATA)
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)

         iterator = dataset.list_tables()
         self.assertIs(iterator.dataset, dataset)
@@ -778,20 +778,20 @@ def test_list_tables_explicit(self):
         TABLE_1 = 'table_one'
         TABLE_2 = 'table_two'
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
         TOKEN = 'TOKEN'
         DATA = {
             'tables': [
                 {'kind': 'bigquery#dataset',
-                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_1),
+                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, TABLE_1),
                  'tableReference': {'tableId': TABLE_1,
-                                    'datasetId': self.DS_NAME,
+                                    'datasetId': self.DS_ID,
                                     'projectId': self.PROJECT},
                  'type': 'TABLE'},
                 {'kind': 'bigquery#dataset',
-                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_NAME, TABLE_2),
+                 'id': '%s:%s.%s' % (self.PROJECT, self.DS_ID, TABLE_2),
                  'tableReference': {'tableId': TABLE_2,
-                                    'datasetId': self.DS_NAME,
+                                    'datasetId': self.DS_ID,
                                     'projectId': self.PROJECT},
                  'type': 'TABLE'},
             ]
@@ -799,7 +799,7 @@ def test_list_tables_explicit(self):

         conn = _Connection(DATA)
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)

         iterator = dataset.list_tables(max_results=3, page_token=TOKEN)
         self.assertIs(iterator.dataset, dataset)
@@ -826,7 +826,7 @@ def test_table_wo_schema(self):
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)
         table = dataset.table('table_name')
         self.assertIsInstance(table, Table)
         self.assertEqual(table.name, 'table_name')
@@ -839,7 +839,7 @@ def test_table_w_schema(self):
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)
-        dataset = self._make_one(self.DS_NAME, client=client)
+        dataset = self._make_one(self.DS_ID, client=client)
         full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
         age = SchemaField('age', 'INTEGER', mode='REQUIRED')
         table = dataset.table('table_name', schema=[full_name, age])
diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py
index a4b96470c2e7..09b57d7b7457 100644
--- a/bigquery/tests/unit/test_job.py
+++ b/bigquery/tests/unit/test_job.py
@@ -82,7 +82,7 @@ def test_missing_reason(self):

 class _Base(object):
     PROJECT = 'project'
     SOURCE1 = 'http://example.com/source1.csv'
-    DS_NAME = 'datset_name'
+    DS_ID = 'datset_id'
     TABLE_NAME = 'table_name'
     JOB_NAME = 'job_name'

@@ -206,7 +206,7 @@ def _makeResource(self, started=False, ended=False):
         config['sourceUris'] = [self.SOURCE1]
         config['destinationTable'] = {
             'projectId': self.PROJECT,
-            'datasetId': self.DS_NAME,
+            'datasetId': self.DS_ID,
             'tableId': self.TABLE_NAME,
         }

@@ -275,7 +275,7 @@ def _verifyResourceProperties(self, job, resource):

         table_ref = config['destinationTable']
         self.assertEqual(job.destination.project, table_ref['projectId'])
-        self.assertEqual(job.destination.dataset_name, table_ref['datasetId'])
+        self.assertEqual(job.destination.dataset_id, table_ref['datasetId'])
         self.assertEqual(job.destination.name, table_ref['tableId'])

         if 'fieldDelimiter' in config:
@@ -519,7 +519,7 @@ def test_from_api_repr_missing_config(self):
         self._setUpConstants()
         client = _Client(self.PROJECT)
         RESOURCE = {
-            'id': '%s:%s' % (self.PROJECT, self.DS_NAME),
+            'id': '%s:%s' % (self.PROJECT, self.DS_ID),
             'jobReference': {
                 'projectId': self.PROJECT,
                 'jobId': self.JOB_NAME,
@@ -543,7 +543,7 @@ def test_from_api_repr_bare(self):
                 'sourceUris': [self.SOURCE1],
                 'destinationTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.TABLE_NAME,
                 },
             }
@@ -603,7 +603,7 @@ def test_begin_w_bound_client(self):
                 'sourceUris': [self.SOURCE1],
                 'destinationTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.TABLE_NAME,
                 },
             },
@@ -638,7 +638,7 @@ def test_begin_w_autodetect(self):
                 'sourceUris': [self.SOURCE1],
                 'destinationTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.TABLE_NAME,
                 },
                 'autodetect': True
@@ -662,7 +662,7 @@ def test_begin_w_alternate_client(self):
             'sourceUris': [self.SOURCE1],
             'destinationTable': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.TABLE_NAME,
             },
             'allowJaggedRows': True,
@@ -848,12 +848,12 @@ def _makeResource(self, started=False, ended=False):
         config = resource['configuration']['copy']
         config['sourceTables'] = [{
             'projectId': self.PROJECT,
-            'datasetId': self.DS_NAME,
+            'datasetId': self.DS_ID,
             'tableId': self.SOURCE_TABLE,
         }]
         config['destinationTable'] = {
             'projectId': self.PROJECT,
-            'datasetId': self.DS_NAME,
+            'datasetId': self.DS_ID,
             'tableId': self.DESTINATION_TABLE,
         }

@@ -866,7 +866,7 @@ def _verifyResourceProperties(self, job, resource):

         table_ref = config['destinationTable']
         self.assertEqual(job.destination.project, table_ref['projectId'])
-        self.assertEqual(job.destination.dataset_name, table_ref['datasetId'])
+        self.assertEqual(job.destination.dataset_id, table_ref['datasetId'])
         self.assertEqual(job.destination.name, table_ref['tableId'])

         sources = config.get('sourceTables')
@@ -875,7 +875,7 @@ def _verifyResourceProperties(self, job, resource):
         self.assertEqual(len(sources), len(job.sources))
         for table_ref, table in zip(sources, job.sources):
             self.assertEqual(table.project, table_ref['projectId'])
-            self.assertEqual(table.dataset_name, table_ref['datasetId'])
+            self.assertEqual(table.dataset_id, table_ref['datasetId'])
             self.assertEqual(table.name, table_ref['tableId'])

         if 'createDisposition' in config:
@@ -921,7 +921,7 @@ def test_from_api_repr_missing_config(self):
         self._setUpConstants()
         client = _Client(self.PROJECT)
         RESOURCE = {
-            'id': '%s:%s' % (self.PROJECT, self.DS_NAME),
+            'id': '%s:%s' % (self.PROJECT, self.DS_ID),
             'jobReference': {
                 'projectId': self.PROJECT,
                 'jobId': self.JOB_NAME,
@@ -944,12 +944,12 @@ def test_from_api_repr_bare(self):
             'copy': {
                 'sourceTables': [{
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.SOURCE_TABLE,
                 }],
                 'destinationTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.DESTINATION_TABLE,
                 },
             }
@@ -973,12 +973,12 @@ def test_from_api_repr_w_sourcetable(self):
             'copy': {
                 'sourceTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.SOURCE_TABLE,
                 },
                 'destinationTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.DESTINATION_TABLE,
                 },
             }
@@ -1002,7 +1002,7 @@ def test_from_api_repr_wo_sources(self):
             'copy': {
                 'destinationTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.DESTINATION_TABLE,
                 },
             }
@@ -1051,12 +1051,12 @@ def test_begin_w_bound_client(self):
             'copy': {
                 'sourceTables': [{
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.SOURCE_TABLE
                 }],
                 'destinationTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.DESTINATION_TABLE,
                 },
             },
@@ -1071,12 +1071,12 @@ def test_begin_w_alternate_client(self):
         COPY_CONFIGURATION = {
             'sourceTables': [{
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.SOURCE_TABLE,
             }],
             'destinationTable': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.DESTINATION_TABLE,
             },
             'createDisposition': 'CREATE_NEVER',
@@ -1203,7 +1203,7 @@ def _makeResource(self, started=False, ended=False):
         config = resource['configuration']['extract']
         config['sourceTable'] = {
             'projectId': self.PROJECT,
-            'datasetId': self.DS_NAME,
+            'datasetId': self.DS_ID,
             'tableId': self.SOURCE_TABLE,
         }
         config['destinationUris'] = [self.DESTINATION_URI]
@@ -1218,7 +1218,7 @@ def _verifyResourceProperties(self, job, resource):

         table_ref = config['sourceTable']
         self.assertEqual(job.source.project, table_ref['projectId'])
-        self.assertEqual(job.source.dataset_name, table_ref['datasetId'])
+        self.assertEqual(job.source.dataset_id, table_ref['datasetId'])
         self.assertEqual(job.source.name, table_ref['tableId'])

         if 'compression' in config:
@@ -1295,7 +1295,7 @@ def test_from_api_repr_missing_config(self):
         self._setUpConstants()
         client = _Client(self.PROJECT)
         RESOURCE = {
-            'id': '%s:%s' % (self.PROJECT, self.DS_NAME),
+            'id': '%s:%s' % (self.PROJECT, self.DS_ID),
             'jobReference': {
                 'projectId': self.PROJECT,
                 'jobId': self.JOB_NAME,
@@ -1318,7 +1318,7 @@ def test_from_api_repr_bare(self):
             'extract': {
                 'sourceTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.SOURCE_TABLE,
                 },
                 'destinationUris': [self.DESTINATION_URI],
@@ -1369,7 +1369,7 @@ def test_begin_w_bound_client(self):
             'extract': {
                 'sourceTable': {
                     'projectId': self.PROJECT,
-                    'datasetId': self.DS_NAME,
+                    'datasetId': self.DS_ID,
                     'tableId': self.SOURCE_TABLE
                 },
                 'destinationUris': [self.DESTINATION_URI],
@@ -1385,7 +1385,7 @@ def test_begin_w_alternate_client(self):
         EXTRACT_CONFIGURATION = {
             'sourceTable': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.SOURCE_TABLE,
             },
             'destinationUris': [self.DESTINATION_URI],
@@ -1604,7 +1604,7 @@ def _verifyResourceProperties(self, job, resource):
             dataset = job.default_dataset
             ds_ref = {
                 'projectId': dataset.project,
-                'datasetId': dataset.name,
+                'datasetId': dataset.dataset_id,
             }
             self.assertEqual(ds_ref, query_config['defaultDataset'])
         else:
@@ -1613,7 +1613,7 @@ def _verifyResourceProperties(self, job, resource):
             table = job.destination
             tb_ref = {
                 'projectId': table.project,
-                'datasetId': table.dataset_name,
+                'datasetId': table.dataset_id,
                 'tableId': table.name
             }
             self.assertEqual(tb_ref, query_config['destinationTable'])
@@ -1687,7 +1687,7 @@ def test_from_api_repr_missing_config(self):
         self._setUpConstants()
         client = _Client(self.PROJECT)
         RESOURCE = {
-            'id': '%s:%s' % (self.PROJECT, self.DS_NAME),
+            'id': '%s:%s' % (self.PROJECT, self.DS_ID),
             'jobReference': {
                 'projectId': self.PROJECT,
                 'jobId': self.JOB_NAME,
@@ -1723,7 +1723,7 @@ def test_from_api_repr_w_properties(self):
         query_config['writeDisposition'] = 'WRITE_TRUNCATE'
         query_config['destinationTable'] = {
             'projectId': self.PROJECT,
-            'datasetId': self.DS_NAME,
+            'datasetId': self.DS_ID,
             'tableId': self.DESTINATION_TABLE,
         }
         klass = self._get_target_class()
@@ -1936,21 +1936,21 @@ def test_referenced_tables(self):
         self.assertIsInstance(local1, Table)
         self.assertEqual(local1.name, 'local1')
         self.assertIsInstance(local1._dataset, Dataset)
-        self.assertEqual(local1.dataset_name, 'dataset')
+        self.assertEqual(local1.dataset_id, 'dataset')
         self.assertEqual(local1.project, self.PROJECT)
         self.assertIs(local1._dataset._client, client)

         self.assertIsInstance(local2, Table)
         self.assertEqual(local2.name, 'local2')
         self.assertIsInstance(local2._dataset, Dataset)
-        self.assertEqual(local2.dataset_name, 'dataset')
+        self.assertEqual(local2.dataset_id, 'dataset')
         self.assertEqual(local2.project, self.PROJECT)
         self.assertIs(local2._dataset._client, client)

         self.assertIsInstance(remote, Table)
         self.assertEqual(remote.name, 'other-table')
         self.assertIsInstance(remote._dataset, Dataset)
-        self.assertEqual(remote.dataset_name, 'other-dataset')
+        self.assertEqual(remote.dataset_id, 'other-dataset')
         self.assertEqual(remote.project, 'other-project-123')
         self.assertIs(remote._dataset._client, client)

@@ -2128,7 +2128,7 @@ def test_begin_w_bound_client(self):
         from google.cloud.bigquery.dataset import Dataset

         PATH = '/projects/%s/jobs' % (self.PROJECT,)
-        DS_NAME = 'DATASET'
+        DS_ID = 'DATASET'
         RESOURCE = self._makeResource()
         # Ensure None for missing server-set props
         del RESOURCE['statistics']['creationTime']
@@ -2139,7 +2139,7 @@ def test_begin_w_bound_client(self):
         client = _Client(project=self.PROJECT, connection=conn)
         job = self._make_one(self.JOB_NAME, self.QUERY, client)

-        job.default_dataset = Dataset(DS_NAME, client)
+        job.default_dataset = Dataset(DS_ID, client)

         job.begin()

@@ -2159,7 +2159,7 @@ def test_begin_w_bound_client(self):
                     'query': self.QUERY,
                     'defaultDataset': {
                         'projectId': self.PROJECT,
-                        'datasetId': DS_NAME,
+                        'datasetId': DS_ID,
                     },
                 },
             },
@@ -2173,7 +2173,7 @@ def test_begin_w_alternate_client(self):

         PATH = '/projects/%s/jobs' % (self.PROJECT,)
         TABLE = 'TABLE'
-        DS_NAME = 'DATASET'
+        DS_ID = 'DATASET'
         RESOURCE = self._makeResource(ended=True)
         QUERY_CONFIGURATION = {
             'query': self.QUERY,
@@ -2181,11 +2181,11 @@ def test_begin_w_alternate_client(self):
             'createDisposition': 'CREATE_NEVER',
             'defaultDataset': {
                 'projectId': self.PROJECT,
-                'datasetId': DS_NAME,
+                'datasetId': DS_ID,
             },
             'destinationTable': {
                 'projectId': self.PROJECT,
-                'datasetId': DS_NAME,
+                'datasetId': DS_ID,
                 'tableId': TABLE,
             },
             'flattenResults': True,
@@ -2203,7 +2203,7 @@ def test_begin_w_alternate_client(self):
         client2 = _Client(project=self.PROJECT, connection=conn2)
         job = self._make_one(self.JOB_NAME, self.QUERY, client1)

-        dataset = Dataset(DS_NAME, client1)
+        dataset = Dataset(DS_ID, client1)
         table = Table(TABLE, dataset)

         job.allow_large_results = True
@@ -2464,14 +2464,14 @@ def test_reload_w_bound_client(self):
         from google.cloud.bigquery.dataset import Table

         PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME)
-        DS_NAME = 'DATASET'
+        DS_ID = 'DATASET'
         DEST_TABLE = 'dest_table'
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
         job = self._make_one(self.JOB_NAME, None, client)

-        dataset = Dataset(DS_NAME, client)
+        dataset = Dataset(DS_ID, client)
         table = Table(DEST_TABLE, dataset)
         job.destination = table

@@ -2487,13 +2487,13 @@ def test_reload_w_bound_client(self):

     def test_reload_w_alternate_client(self):
         PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_NAME)
-        DS_NAME = 'DATASET'
+        DS_ID = 'DATASET'
         DEST_TABLE = 'dest_table'
         RESOURCE = self._makeResource()
         q_config = RESOURCE['configuration']['query']
         q_config['destinationTable'] = {
             'projectId': self.PROJECT,
-            'datasetId': DS_NAME,
+            'datasetId': DS_ID,
             'tableId': DEST_TABLE,
         }
         conn1 = _Connection()
@@ -2720,8 +2720,8 @@ def project(self):
         return TestLoadJob.PROJECT

     @property
-    def dataset_name(self):
-        return TestLoadJob.DS_NAME
+    def dataset_id(self):
+        return TestLoadJob.DS_ID


 class _Connection(object):
diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py
index 9e0db94bc6cb..cb481eac1932 100644
--- a/bigquery/tests/unit/test_table.py
+++ b/bigquery/tests/unit/test_table.py
@@ -60,7 +60,7 @@ def test_ctor_defaults(self):

 class TestTable(unittest.TestCase, _SchemaBase):
     PROJECT = 'prahj-ekt'
-    DS_NAME = 'dataset-name'
+    DS_ID = 'dataset-name'
     TABLE_NAME = 'table-name'

     @staticmethod
@@ -81,7 +81,7 @@ def _setUpConstants(self):
             tzinfo=UTC)
         self.ETAG = 'ETAG'
         self.TABLE_ID = '%s:%s:%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         self.RESOURCE_URL = 'http://example.com/path/to/resource'
         self.NUM_BYTES = 12345
         self.NUM_ROWS = 67
@@ -92,7 +92,7 @@ def _makeResource(self):
             'creationTime': self.WHEN_TS * 1000,
             'tableReference':
                 {'projectId': self.PROJECT,
-                 'datasetId': self.DS_NAME,
+                 'datasetId': self.DS_ID,
                  'tableId': self.TABLE_NAME},
             'schema': {'fields': [
                 {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
@@ -171,11 +171,11 @@ def test_ctor(self):
         self.assertEqual(table.name, self.TABLE_NAME)
         self.assertIs(table._dataset, dataset)
         self.assertEqual(table.project, self.PROJECT)
-        self.assertEqual(table.dataset_name, self.DS_NAME)
+        self.assertEqual(table.dataset_id, self.DS_ID)
         self.assertEqual(
             table.path,
             '/projects/%s/datasets/%s/tables/%s' % (
-                self.PROJECT, self.DS_NAME, self.TABLE_NAME))
+                self.PROJECT, self.DS_ID, self.TABLE_NAME))
         self.assertEqual(table.schema, [])

         self.assertIsNone(table.created)
@@ -285,9 +285,9 @@ def test_props_set_by_server(self):
         CREATED = datetime.datetime(2015, 7, 29, 12, 13, 22, tzinfo=UTC)
         MODIFIED = datetime.datetime(2015, 7, 29, 14, 47, 15, tzinfo=UTC)
         TABLE_ID = '%s:%s:%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         URL = 'http://example.com/projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         client = _Client(self.PROJECT)
         dataset = _Dataset(client)
         table = self._make_one(self.TABLE_NAME, dataset)
@@ -421,10 +421,10 @@ def test_from_api_repr_bare(self):
         client = _Client(self.PROJECT)
         dataset = _Dataset(client)
         RESOURCE = {
-            'id': '%s:%s:%s' % (self.PROJECT, self.DS_NAME, self.TABLE_NAME),
+            'id': '%s:%s:%s' % (self.PROJECT, self.DS_ID, self.TABLE_NAME),
             'tableReference': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.TABLE_NAME,
             },
             'type': 'TABLE',
@@ -445,7 +445,7 @@ def test_from_api_repr_w_properties(self):
         self._verifyResourceProperties(table, RESOURCE)

     def test_create_new_day_partitioned_table(self):
-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
@@ -461,7 +461,7 @@ def test_create_new_day_partitioned_table(self):
         SENT = {
             'tableReference': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.TABLE_NAME},
             'timePartitioning': {'type': 'DAY'},
         }
@@ -471,7 +471,7 @@ def test_create_w_bound_client(self):
         from google.cloud.bigquery.table import SchemaField

-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
@@ -490,7 +490,7 @@ def test_create_w_bound_client(self):
         SENT = {
             'tableReference': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.TABLE_NAME},
             'schema': {'fields': [
                 {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
@@ -502,7 +502,7 @@ def test_create_w_partition_no_expire(self):
         from google.cloud.bigquery.table import SchemaField

-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
@@ -524,7 +524,7 @@ def test_create_w_partition_no_expire(self):
         SENT = {
             'tableReference': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.TABLE_NAME},
             'timePartitioning': {'type': 'DAY'},
             'schema': {'fields': [
@@ -537,7 +537,7 @@ def test_create_w_partition_and_expire(self):
         from google.cloud.bigquery.table import SchemaField

-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
@@ -559,7 +559,7 @@ def test_create_w_partition_and_expire(self):
         SENT = {
             'tableReference': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.TABLE_NAME},
             'timePartitioning': {'type': 'DAY', 'expirationMs': 100},
             'schema': {'fields': [
@@ -712,7 +712,7 @@ def test_create_w_alternate_client(self):
         from google.cloud._helpers import UTC
         from google.cloud._helpers import _millis

-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
         DESCRIPTION = 'DESCRIPTION'
         TITLE = 'TITLE'
         QUERY = 'select fullname, age from person_ages'
@@ -745,7 +745,7 @@ def test_create_w_alternate_client(self):
         SENT = {
             'tableReference': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.TABLE_NAME},
             'description': DESCRIPTION,
             'friendlyName': TITLE,
@@ -759,7 +759,7 @@ def test_create_w_missing_output_properties(self):
         # lacks 'creationTime' / 'lastModifiedTime'
         from google.cloud.bigquery.table import SchemaField

-        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_NAME)
+        PATH = 'projects/%s/datasets/%s/tables' % (self.PROJECT, self.DS_ID)
         RESOURCE = self._makeResource()
         del RESOURCE['creationTime']
         del RESOURCE['lastModifiedTime']
@@ -781,7 +781,7 @@ def test_create_w_missing_output_properties(self):
         SENT = {
             'tableReference': {
                 'projectId': self.PROJECT,
-                'datasetId': self.DS_NAME,
+                'datasetId': self.DS_ID,
                 'tableId': self.TABLE_NAME},
             'schema': {'fields': [
                 {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
@@ -792,7 +792,7 @@ def test_exists_miss_w_bound_client(self):
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         conn = _Connection()
         client = _Client(project=self.PROJECT, connection=conn)
         dataset = _Dataset(client)
@@ -808,7 +808,7 @@ def test_exists_hit_w_alternate_client(self):
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         conn1 = _Connection()
         client1 = _Client(project=self.PROJECT, connection=conn1)
         conn2 = _Connection({})
@@ -827,7 +827,7 @@ def test_reload_w_bound_client(self):
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         RESOURCE = self._makeResource()
         conn = _Connection(RESOURCE)
         client = _Client(project=self.PROJECT, connection=conn)
@@ -844,7 +844,7 @@ def test_reload_w_alternate_client(self):
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         RESOURCE = self._makeResource()
         conn1 = _Connection()
         client1 = _Client(project=self.PROJECT, connection=conn1)
@@ -874,7 +874,7 @@ def test_patch_w_invalid_expiration(self):

     def test_patch_w_bound_client(self):
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         DESCRIPTION = 'DESCRIPTION'
         TITLE = 'TITLE'
         RESOURCE = self._makeResource()
@@ -908,7 +908,7 @@ def test_patch_w_alternate_client(self):
         from google.cloud.bigquery.table import SchemaField

         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         QUERY = 'select fullname, age from person_ages'
         LOCATION = 'EU'
         RESOURCE = self._makeResource()
@@ -950,7 +950,7 @@ def test_patch_w_schema_None(self):
         # Simulate deleting schema:  not sure if back-end will actually
         # allow this operation, but the spec says it is optional.
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         DESCRIPTION = 'DESCRIPTION'
         TITLE = 'TITLE'
         RESOURCE = self._makeResource()
@@ -975,7 +975,7 @@ def test_update_w_bound_client(self):
         from google.cloud.bigquery.table import SchemaField
 
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         DESCRIPTION = 'DESCRIPTION'
         TITLE = 'TITLE'
         RESOURCE = self._makeResource()
@@ -999,7 +999,7 @@ def test_update_w_bound_client(self):
         SENT = {
             'tableReference':
                 {'projectId': self.PROJECT,
-                 'datasetId': self.DS_NAME,
+                 'datasetId': self.DS_ID,
                  'tableId': self.TABLE_NAME},
             'schema': {'fields': [
                 {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
@@ -1017,7 +1017,7 @@ def test_update_w_alternate_client(self):
         from google.cloud._helpers import _millis
 
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         DEF_TABLE_EXP = 12345
         LOCATION = 'EU'
         QUERY = 'select fullname, age from person_ages'
@@ -1051,7 +1051,7 @@ def test_update_w_alternate_client(self):
         SENT = {
             'tableReference':
                 {'projectId': self.PROJECT,
-                 'datasetId': self.DS_NAME,
+                 'datasetId': self.DS_ID,
                  'tableId': self.TABLE_NAME},
             'expirationTime': _millis(self.EXP_TIME),
             'location': 'EU',
@@ -1062,7 +1062,7 @@ def test_delete_w_bound_client(self):
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)
         dataset = _Dataset(client)
@@ -1077,7 +1077,7 @@ def test_delete_w_alternate_client(self):
         PATH = 'projects/%s/datasets/%s/tables/%s' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         conn1 = _Connection()
         client1 = _Client(project=self.PROJECT, connection=conn1)
         conn2 = _Connection({})
@@ -1112,7 +1112,7 @@ def test_fetch_data_w_bound_client(self):
         from google.cloud.bigquery.table import SchemaField
 
         PATH = 'projects/%s/datasets/%s/tables/%s/data' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         WHEN_TS = 1437767599.006
         WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(
             tzinfo=UTC)
@@ -1185,7 +1185,7 @@ def test_fetch_data_w_alternate_client(self):
         from google.cloud.bigquery.table import SchemaField
 
         PATH = 'projects/%s/datasets/%s/tables/%s/data' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         MAX = 10
         TOKEN = 'TOKEN'
         DATA = {
@@ -1256,7 +1256,7 @@ def test_fetch_data_w_repeated_fields(self):
         from google.cloud.bigquery.table import SchemaField
 
         PATH = 'projects/%s/datasets/%s/tables/%s/data' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         ROWS = 1234
         TOKEN = 'TOKEN'
         DATA = {
@@ -1309,7 +1309,7 @@ def test_fetch_data_w_record_schema(self):
         from google.cloud.bigquery.table import SchemaField
 
         PATH = 'projects/%s/datasets/%s/tables/%s/data' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         ROWS = 1234
         TOKEN = 'TOKEN'
         DATA = {
@@ -1451,7 +1451,7 @@ def test_insert_data_w_bound_client(self):
         WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(
             tzinfo=UTC)
         PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)
         dataset = _Dataset(client)
@@ -1492,7 +1492,7 @@ def test_insert_data_w_alternate_client(self):
         from google.cloud.bigquery.table import SchemaField
 
         PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         RESPONSE = {
             'insertErrors': [
                 {'index': 1,
@@ -1561,7 +1561,7 @@ def test_insert_data_w_repeated_fields(self):
         from google.cloud.bigquery.table import SchemaField
 
         PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)
         dataset = _Dataset(client)
@@ -1597,7 +1597,7 @@ def test_insert_data_w_record_schema(self):
         from google.cloud.bigquery.table import SchemaField
 
         PATH = 'projects/%s/datasets/%s/tables/%s/insertAll' % (
-            self.PROJECT, self.DS_NAME, self.TABLE_NAME)
+            self.PROJECT, self.DS_ID, self.TABLE_NAME)
         conn = _Connection({})
         client = _Client(project=self.PROJECT, connection=conn)
         dataset = _Dataset(client)
@@ -1898,7 +1898,7 @@ def test_upload_file_resumable_metadata(self):
                 'sourceFormat': config_args['source_format'],
                 'destinationTable': {
                     'projectId': table._dataset._client.project,
-                    'datasetId': table.dataset_name,
+                    'datasetId': table.dataset_id,
                     'tableId': table.name,
                 },
                 'allowJaggedRows': config_args['allow_jagged_rows'],
@@ -2230,8 +2230,9 @@ def _call_fut(source_format, schema, dataset, name):
 
     def test_empty_schema(self):
         source_format = 'AVRO'
-        dataset = mock.Mock(project='prediction', spec=['name', 'project'])
-        dataset.name = 'market'  # mock.Mock() treats `name` specially.
+        dataset = mock.Mock(project='prediction',
+                            spec=['dataset_id', 'project'])
+        dataset.dataset_id = 'market'  # only `name` is special to Mock.
         table_name = 'chairs'
 
         metadata = self._call_fut(source_format, [], dataset, table_name)
@@ -2241,7 +2242,7 @@ def test_empty_schema(self):
                 'sourceFormat': source_format,
                 'destinationTable': {
                     'projectId': dataset.project,
-                    'datasetId': dataset.name,
+                    'datasetId': dataset.dataset_id,
                     'tableId': table_name,
                 },
             },
@@ -2254,8 +2255,8 @@ def test_with_schema(self):
         source_format = 'CSV'
         full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
-        dataset = mock.Mock(project='blind', spec=['name', 'project'])
-        dataset.name = 'movie'  # mock.Mock() treats `name` specially.
+        dataset = mock.Mock(project='blind', spec=['dataset_id', 'project'])
+        dataset.dataset_id = 'movie'  # only `name` is special to Mock.
         table_name = 'teebull-neem'
 
         metadata = self._call_fut(
             source_format, [full_name], dataset, table_name)
@@ -2266,7 +2267,7 @@ def test_with_schema(self):
                 'sourceFormat': source_format,
                 'destinationTable': {
                     'projectId': dataset.project,
-                    'datasetId': dataset.name,
+                    'datasetId': dataset.dataset_id,
                     'tableId': table_name,
                 },
                 'schema': {
@@ -2309,14 +2310,14 @@ def run(self):
 
 class _Dataset(object):
 
-    def __init__(self, client, name=TestTable.DS_NAME):
+    def __init__(self, client, dataset_id=TestTable.DS_ID):
         self._client = client
-        self.name = name
+        self.dataset_id = dataset_id
 
     @property
    def path(self):
         return '/projects/%s/datasets/%s' % (
-            self._client.project, self.name)
+            self._client.project, self.dataset_id)
 
     @property
     def project(self):
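
For reviewers: the hunks above exercise the renames mechanically through test
doubles, so a minimal sketch of the renamed Dataset surface may help. This
snippet is illustrative only, not part of the patch; it assumes a reachable
project and real credentials, and uses only attributes this patch introduces
(Dataset.dataset_id, Dataset.full_dataset_id).

    # Illustrative sketch -- not part of this patch. Assumes real credentials
    # and an existing project; error handling elided.
    from google.cloud.bigquery.client import Client
    from google.cloud.bigquery.dataset import Dataset

    client = Client(project='my-project')
    dataset = Dataset('my_dataset', client)  # first argument is now dataset_id

    print(dataset.dataset_id)       # 'my_dataset' (formerly Dataset.name)
    print(dataset.full_dataset_id)  # None until set from the server; then the
                                    # "project_id:dataset_id" form the API
                                    # reports as "id" (formerly dataset_id)
    print(dataset.path)             # '/projects/my-project/datasets/my_dataset'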