From d674947b72837560b095f5bcaa0d4647a68b355b Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 9 Aug 2022 20:48:49 +0000 Subject: [PATCH 01/13] feat: Add MySQL dialect to bigquerymigration v2 client library PiperOrigin-RevId: 466417698 Source-Link: https://github.com/googleapis/googleapis/commit/2bafaf1c1fbfb3ed3bf10f69dee61c62e0e15142 Source-Link: https://github.com/googleapis/googleapis-gen/commit/15db99606c2f37840475be9dfa5faa32d0844dbf Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTVkYjk5NjA2YzJmMzc4NDA0NzViZTlkZmE1ZmFhMzJkMDg0NGRiZiJ9 --- owl-bot-staging/v2/.eslintignore | 7 + owl-bot-staging/v2/.eslintrc.json | 3 + owl-bot-staging/v2/.gitignore | 14 + owl-bot-staging/v2/.jsdoc.js | 55 + owl-bot-staging/v2/.mocharc.js | 33 + owl-bot-staging/v2/.prettierrc.js | 22 + owl-bot-staging/v2/README.md | 1 + owl-bot-staging/v2/linkinator.config.json | 16 + owl-bot-staging/v2/package.json | 64 + .../migration/v2/migration_entities.proto | 233 +++ .../v2/migration_error_details.proto | 62 + .../migration/v2/migration_metrics.proto | 111 ++ .../migration/v2/migration_service.proto | 245 ++++ .../migration/v2/translation_config.proto | 257 ++++ ...ation_service.create_migration_workflow.js | 64 + ...ation_service.delete_migration_workflow.js | 59 + ...migration_service.get_migration_subtask.js | 63 + ...igration_service.get_migration_workflow.js | 63 + ...gration_service.list_migration_subtasks.js | 83 ++ ...ration_service.list_migration_workflows.js | 77 + ...ration_service.start_migration_workflow.js | 59 + ...ta.google.cloud.bigquery.migration.v2.json | 335 +++++ owl-bot-staging/v2/src/index.ts | 25 + owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 ++ owl-bot-staging/v2/src/v2/index.ts | 19 + .../v2/src/v2/migration_service_client.ts | 1246 ++++++++++++++++ .../v2/migration_service_client_config.json | 71 + .../src/v2/migration_service_proto_list.json | 7 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + owl-bot-staging/v2/system-test/install.ts | 49 + .../v2/test/gapic_migration_service_v2.ts | 1254 +++++++++++++++++ owl-bot-staging/v2/tsconfig.json | 19 + owl-bot-staging/v2/webpack.config.js | 64 + owl-bot-staging/v2alpha/.eslintignore | 7 + owl-bot-staging/v2alpha/.eslintrc.json | 3 + owl-bot-staging/v2alpha/.gitignore | 14 + owl-bot-staging/v2alpha/.jsdoc.js | 55 + owl-bot-staging/v2alpha/.mocharc.js | 33 + owl-bot-staging/v2alpha/.prettierrc.js | 22 + owl-bot-staging/v2alpha/README.md | 1 + .../v2alpha/linkinator.config.json | 16 + owl-bot-staging/v2alpha/package.json | 64 + .../migration/v2alpha/assessment_task.proto | 49 + .../v2alpha/migration_entities.proto | 244 ++++ .../v2alpha/migration_error_details.proto | 62 + .../migration/v2alpha/migration_metrics.proto | 111 ++ .../migration/v2alpha/migration_service.proto | 248 ++++ .../migration/v2alpha/translation_task.proto | 207 +++ ...ation_service.create_migration_workflow.js | 64 + ...ation_service.delete_migration_workflow.js | 59 + ...migration_service.get_migration_subtask.js | 63 + ...igration_service.get_migration_workflow.js | 63 + ...gration_service.list_migration_subtasks.js | 83 ++ ...ration_service.list_migration_workflows.js | 77 + ...ration_service.start_migration_workflow.js | 59 + ...ogle.cloud.bigquery.migration.v2alpha.json | 335 +++++ owl-bot-staging/v2alpha/src/index.ts | 25 + .../v2alpha/src/v2alpha/gapic_metadata.json | 101 ++ owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 + .../src/v2alpha/migration_service_client.ts | 1246 ++++++++++++++++ 
.../migration_service_client_config.json | 73 + .../v2alpha/migration_service_proto_list.json | 8 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + .../v2alpha/system-test/install.ts | 49 + .../test/gapic_migration_service_v2alpha.ts | 1254 +++++++++++++++++ owl-bot-staging/v2alpha/tsconfig.json | 19 + owl-bot-staging/v2alpha/webpack.config.js | 64 + 69 files changed, 9696 insertions(+) create mode 100644 owl-bot-staging/v2/.eslintignore create mode 100644 owl-bot-staging/v2/.eslintrc.json create mode 100644 owl-bot-staging/v2/.gitignore create mode 100644 owl-bot-staging/v2/.jsdoc.js create mode 100644 owl-bot-staging/v2/.mocharc.js create mode 100644 owl-bot-staging/v2/.prettierrc.js create mode 100644 owl-bot-staging/v2/README.md create mode 100644 owl-bot-staging/v2/linkinator.config.json create mode 100644 owl-bot-staging/v2/package.json create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json create mode 100644 owl-bot-staging/v2/src/index.ts create mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/src/v2/index.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json create mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2/system-test/install.ts create mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts create mode 100644 owl-bot-staging/v2/tsconfig.json create mode 100644 owl-bot-staging/v2/webpack.config.js create mode 100644 owl-bot-staging/v2alpha/.eslintignore create mode 100644 owl-bot-staging/v2alpha/.eslintrc.json create mode 100644 owl-bot-staging/v2alpha/.gitignore create mode 100644 owl-bot-staging/v2alpha/.jsdoc.js create mode 100644 owl-bot-staging/v2alpha/.mocharc.js create mode 100644 owl-bot-staging/v2alpha/.prettierrc.js create mode 100644 owl-bot-staging/v2alpha/README.md create mode 100644 owl-bot-staging/v2alpha/linkinator.config.json create 
mode 100644 owl-bot-staging/v2alpha/package.json create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json create mode 100644 owl-bot-staging/v2alpha/src/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2alpha/system-test/install.ts create mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts create mode 100644 owl-bot-staging/v2alpha/tsconfig.json create mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js new file mode 100644 index 
0000000..aabe555 --- /dev/null +++ b/owl-bot-staging/v2/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json new file mode 100644 index 0000000..6b38f7b --- /dev/null +++ b/owl-bot-staging/v2/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . 
&& cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.1.1" + }, + "devDependencies": { + "@types/mocha": "^9.1.0", + "@types/node": "^16.0.0", + "@types/sinon": "^10.0.8", + "c8": "^7.11.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.7", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^3.0.0", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^9.2.6", + "typescript": "^4.5.5", + "webpack": "^5.67.0", + "webpack-cli": "^4.9.1" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto new file mode 100644 index 0000000..7d77bae --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto @@ -0,0 +1,233 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2/translation_config.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. 
forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. + // The ID is server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map<string, MigrationTask> tasks = 2; + + // Output only. The status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // The task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. + FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Batch SQL Translation. + TranslationConfigDetails translation_config_details = 14; + } + + // Output only. Immutable. The unique identifier for the migration task. The + // ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be one of the supported task types: + // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, + // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, + // Translation_Snowflake2BQ, Translation_Netezza2BQ, + // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, + // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. + string type = 2; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask.
+ enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID + // is server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 6 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Provides details to errors and issues encountered while + // processing the subtask. Presence of error details does not mean that the + // subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number or resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto new file mode 100644 index 0000000..199e2db --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that where encountered when +// processing a subtask. +message ErrorDetail { + // Optional. The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto new file mode 100644 index 0000000..e52fead --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. 
+ oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately `+/-10^(+/-300)` and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto new file mode 100644 index 0000000..3c1a89e --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto @@ -0,0 +1,245 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_entities.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. 
+ rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. + rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. 
+ // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. + // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. 
+ string next_page_token = 2; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto new file mode 100644 index 0000000..994140d --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto @@ -0,0 +1,257 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationConfigProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The translation config to capture necessary settings for a translation task +// and subtask. +message TranslationConfigDetails { + // The chosen path where the source for input files will be found. + oneof source_location { + // The Cloud Storage path for a directory of files to translate in a task. + string gcs_source_path = 1; + } + + // The chosen path where the destination for output files will be found. + oneof target_location { + // The Cloud Storage path to write back the corresponding input files to. + string gcs_target_path = 2; + } + + // The dialect of the input files. + Dialect source_dialect = 3; + + // The target dialect for the engine to translate the input to. + Dialect target_dialect = 4; + + // The mapping of full SQL object names from their current state to the + // desired output. + oneof output_name_mapping { + // The mapping of objects to their desired output names in list form. + ObjectNameMappingList name_mapping_list = 5; + } + + // The default source environment values for the translation. + SourceEnv source_env = 6; +} + +// The possible dialect options for translation. +message Dialect { + // The possible dialect options that this message represents. 
+ oneof dialect_value { + // The BigQuery dialect + BigQueryDialect bigquery_dialect = 1; + + // The HiveQL dialect + HiveQLDialect hiveql_dialect = 2; + + // The Redshift dialect + RedshiftDialect redshift_dialect = 3; + + // The Teradata dialect + TeradataDialect teradata_dialect = 4; + + // The Oracle dialect + OracleDialect oracle_dialect = 5; + + // The SparkSQL dialect + SparkSQLDialect sparksql_dialect = 6; + + // The Snowflake dialect + SnowflakeDialect snowflake_dialect = 7; + + // The Netezza dialect + NetezzaDialect netezza_dialect = 8; + + // The Azure Synapse dialect + AzureSynapseDialect azure_synapse_dialect = 9; + + // The Vertica dialect + VerticaDialect vertica_dialect = 10; + + // The SQL Server dialect + SQLServerDialect sql_server_dialect = 11; + + // The Postgresql dialect + PostgresqlDialect postgresql_dialect = 12; + + // The Presto dialect + PrestoDialect presto_dialect = 13; + + // The MySQL dialect + MySQLDialect mysql_dialect = 14; + } +} + +// The dialect definition for BigQuery. +message BigQueryDialect {} + +// The dialect definition for HiveQL. +message HiveQLDialect {} + +// The dialect definition for Redshift. +message RedshiftDialect {} + +// The dialect definition for Teradata. +message TeradataDialect { + // The sub-dialect options for Teradata. + enum Mode { + // Unspecified mode. + MODE_UNSPECIFIED = 0; + + // Teradata SQL mode. + SQL = 1; + + // BTEQ mode (which includes SQL). + BTEQ = 2; + } + + // Which Teradata sub-dialect mode the user specifies. + Mode mode = 1; +} + +// The dialect definition for Oracle. +message OracleDialect {} + +// The dialect definition for SparkSQL. +message SparkSQLDialect {} + +// The dialect definition for Snowflake. +message SnowflakeDialect {} + +// The dialect definition for Netezza. +message NetezzaDialect {} + +// The dialect definition for Azure Synapse. +message AzureSynapseDialect {} + +// The dialect definition for Vertica. +message VerticaDialect {} + +// The dialect definition for SQL Server. +message SQLServerDialect {} + +// The dialect definition for Postgresql. +message PostgresqlDialect {} + +// The dialect definition for Presto. +message PrestoDialect {} + +// The dialect definition for MySQL. +message MySQLDialect {} + +// Represents a map of name mappings using a list of key:value proto messages of +// existing name to desired output name. +message ObjectNameMappingList { + // The elements of the object name map. + repeated ObjectNameMapping name_map = 1; +} + +// Represents a key-value pair of NameMappingKey to NameMappingValue to +// represent the mapping of SQL names from the input value to desired output. +message ObjectNameMapping { + // The name of the object in source that is being mapped. + NameMappingKey source = 1; + + // The desired target name of the object that is being mapped. + NameMappingValue target = 2; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the source data warehouse. +message NameMappingKey { + // The type of the object that is being mapped. + enum Type { + // Unspecified name mapping type. + TYPE_UNSPECIFIED = 0; + + // The object being mapped is a database. + DATABASE = 1; + + // The object being mapped is a schema. + SCHEMA = 2; + + // The object being mapped is a relation. + RELATION = 3; + + // The object being mapped is an attribute. + ATTRIBUTE = 4; + + // The object being mapped is a relation alias. + RELATION_ALIAS = 5; + + // The object being mapped is a an attribute alias. 
+ ATTRIBUTE_ALIAS = 6; + + // The object being mapped is a function. + FUNCTION = 7; + } + + // The type of object that is being mapped. + Type type = 1; + + // The database name (BigQuery project ID equivalent in the source data + // warehouse). + string database = 2; + + // The schema name (BigQuery dataset equivalent in the source data warehouse). + string schema = 3; + + // The relation name (BigQuery table or view equivalent in the source data + // warehouse). + string relation = 4; + + // The attribute name (BigQuery column equivalent in the source data + // warehouse). + string attribute = 5; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the target data warehouse. +message NameMappingValue { + // The database name (BigQuery project ID equivalent in the target data + // warehouse). + string database = 1; + + // The schema name (BigQuery dataset equivalent in the target data warehouse). + string schema = 2; + + // The relation name (BigQuery table or view equivalent in the target data + // warehouse). + string relation = 3; + + // The attribute name (BigQuery column equivalent in the target data + // warehouse). + string attribute = 4; +} + +// Represents the default source environment values for the translation. +message SourceEnv { + // The default database name to fully qualify SQL objects when their database + // name is missing. + string default_database = 1; + + // The schema search path. When SQL objects are missing schema name, + // translation engine will search through this list to find the value. + repeated string schema_search_path = 2; +} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..9aed895 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js @@ -0,0 +1,64 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. 
+ */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..aeda293 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js @@ -0,0 +1,59 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..df6f7d2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js @@ -0,0 +1,63 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..72f7565 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js @@ -0,0 +1,63 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..95e5833 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js @@ -0,0 +1,83 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..63402d4 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js @@ -0,0 +1,77 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..76c614c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js @@ -0,0 +1,59 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json new file mode 100644 index 0000000..c5cd62c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2", + "version": "v2" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 56, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 69, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 75, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "service": { + "shortName": "MigrationService", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts new file mode 100644 index 0000000..35a8fd9 --- /dev/null +++ b/owl-bot-staging/v2/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2 from './v2'; +const MigrationServiceClient = v2.MigrationServiceClient; +type MigrationServiceClient = v2.MigrationServiceClient; +export {v2, MigrationServiceClient}; +export default {v2, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json new file mode 100644 index 0000000..e0aa12f --- /dev/null +++ b/owl-bot-staging/v2/src/v2/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2", + "libraryPackage": "@google-cloud/migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/index.ts b/owl-bot-staging/v2/src/v2/index.ts new 
file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts new file mode 100644 index 0000000..20f6e71 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client.ts @@ -0,0 +1,1246 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; + +import { Transform } from 'stream'; +import { RequestType } from 'google-gax/build/src/apitypes'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
+ * @class + * @memberof v2 + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. 
+ * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise<string>; + getProjectId(callback: Callback<string, undefined, undefined>): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback<string, undefined, undefined>): + Promise<string>|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required.
The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
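+ *
+ * A minimal usage sketch, not definitive: it assumes a client already constructed
+ * as `migrationClient` (as in the generated samples) and an existing subtask:
+ *
+ *     // Illustrative resource name, following the `Example:` format above.
+ *     const name = 'projects/123/locations/us/workflows/1234/subtasks/543';
+ *     const [subtask] = await migrationClient.getMigrationSubtask({name});
+ *     console.log(subtask);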
+ * @example include:samples/generated/v2/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
+ * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. 
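+ *
+ * Illustrative call (editor's sketch, not part of the generated samples;
+ * `projectId` and `workflowId` are placeholder values):
+ *
+ *     const parent = `projects/${projectId}/locations/us/workflows/${workflowId}`;
+ *     const [subtasks] = await client.listMigrationSubtasks({parent});
+ *     for (const subtask of subtasks) {
+ *       console.log(subtask.name);
+ *     }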
+ */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. 
+ * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. 
The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. + */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. 
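+ *
+ * Illustrative round trip (editor's sketch; the IDs are placeholders and the
+ * resource pattern is assumed to be
+ * `projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}`):
+ *
+ *     const name = client.migrationSubtaskPath('my-project', 'us', 'wf-1', 'st-1');
+ *     const location = client.matchLocationFromMigrationSubtaskName(name); // 'us'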
+ */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
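+ *
+ * Illustrative usage (editor's sketch):
+ *
+ *     const client = new MigrationServiceClient();
+ *     // ... make API calls ...
+ *     await client.close(); // terminates the channel; the client cannot be reused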
+ */ + close(): Promise { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json new file mode 100644 index 0000000..5832815 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client_config.json @@ -0,0 +1,71 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListMigrationSubtasks": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json new file mode 100644 index 0000000..57df7ab --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json @@ -0,0 +1,7 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" +] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..aa2c893 --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..0afe940 --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts new file mode 100644 index 0000000..8ec4522 --- /dev/null +++ b/owl-bot-staging/v2/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import { packNTest } from 'pack-n-play'; +import { readFileSync } from 'fs'; +import { describe, it } from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts new file mode 100644 index 0000000..eb05cd6 --- /dev/null +++ b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts @@ -0,0 +1,1254 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import { describe, it } from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2.MigrationServiceClient', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + 
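// The inner API call is stubbed to reject with `expectedError`, so the public
+      // method is expected to reject with that same error.
+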
await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = 
''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + 
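// Also check that the stub was invoked with the original request and the call
+      // options carrying the derived `x-goog-request-params` routing header.
+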
assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + 
client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + 
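// The page descriptor's createStream should have received the inner paginated
+      // method, the request, and call settings that include the routing header.
+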
assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', 
private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + 
}); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); + 
const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + 
expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + 
.getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
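The path-template tests above exercise the resource-name helpers exposed by the generated client (`migrationWorkflowPath`, `matchWorkflowFromMigrationWorkflowName`, and so on). A minimal usage sketch follows; the import path (`@google-cloud/migration`) and the example project/location/workflow values are assumptions taken from this staging package, not from a published release.

```ts
// Sketch only: import path and identifiers are assumed from this staging package.
import {v2} from '@google-cloud/migration';

const client = new v2.MigrationServiceClient();

// Build a fully qualified resource name from its components.
const name = client.migrationWorkflowPath('my-project', 'us', 'my-workflow');
// name === 'projects/my-project/locations/us/workflows/my-workflow'

// Parse components back out of a resource name.
console.log(client.matchWorkflowFromMigrationWorkflowName(name)); // 'my-workflow'
console.log(client.matchLocationFromMigrationWorkflowName(name)); // 'us'
```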
+ +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2alpha/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js new file mode 100644 index 0000000..aabe555 --- /dev/null +++ b/owl-bot-staging/v2alpha/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2alpha/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2alpha/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2alpha/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2alpha/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json new file mode 100644 index 0000000..6b38f7b --- /dev/null +++ b/owl-bot-staging/v2alpha/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.1.1" + }, + "devDependencies": { + "@types/mocha": "^9.1.0", + "@types/node": "^16.0.0", + "@types/sinon": "^10.0.8", + "c8": "^7.11.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.7", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^3.0.0", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^9.2.6", + "typescript": "^4.5.5", + "webpack": "^5.67.0", + "webpack-cli": "^4.9.1" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto new file mode 100644 index 0000000..0c6ea13 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto @@ -0,0 +1,49 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "AssessmentTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Assessment task config. +message AssessmentTaskDetails { + // Required. The Cloud Storage path for assessment input files. + string input_path = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The BigQuery dataset for output. + string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. An optional Cloud Storage path to write the query logs (which is + // then used as an input path on the translation task) + string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) + // from which the input data is extracted. + string data_source = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// Details for an assessment task orchestration result. +message AssessmentOrchestrationResultDetails { + // Optional. The version used for the output table schemas. + string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto new file mode 100644 index 0000000..50d4c75 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto @@ -0,0 +1,244 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
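The `AssessmentTaskDetails` message above is what a caller embeds in a `MigrationTask` when creating an assessment workflow. The sketch below is illustrative only: the import path mirrors this staging package, and the task `type` string plus all bucket/dataset values are placeholders, since the exact type identifier is not defined in this diff.

```ts
// Sketch only: 'Assessment' as the task type and all resource values are assumptions.
import {v2alpha} from '@google-cloud/migration';

async function createAssessmentWorkflow(projectId: string): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  const [workflow] = await client.createMigrationWorkflow({
    parent: `projects/${projectId}/locations/us`,
    migrationWorkflow: {
      displayName: 'teradata-assessment',
      tasks: {
        // The map key is only a convenient handle for the task within the workflow.
        assessment: {
          type: 'Assessment', // assumed task type string
          assessmentTaskDetails: {
            inputPath: 'gs://my-bucket/assessment-input/*', // extracted source metadata
            outputDataset: 'assessment_output',             // BigQuery dataset for results
            dataSource: 'TERADATA',                         // source warehouse type
          },
        },
      },
    },
  });
  console.log(`Created ${workflow.name} in state ${workflow.state}`);
}
```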
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map tasks = 2; + + // Output only. That status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // Tha task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. 
+ FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Assessment. + AssessmentTaskDetails assessment_task_details = 12; + + // Task configuration for Batch/Offline SQL Translation. + TranslationTaskDetails translation_task_details = 13; + } + + // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be a supported task type. + string type = 2; + + // DEPRECATED! Use one of the task_details below. + // The details of the task. The type URL must be one of the supported task + // details messages and correspond to the Task's type. + google.protobuf.Any details = 3; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; + + // Output only. Additional information about the orchestration. + MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. 
Provides details to errors and issues encountered while processing the + // subtask. Presence of error details does not mean that the subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number or resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} + +// Additional information from the orchestrator when it is done with the +// task orchestration. +message MigrationTaskOrchestrationResult { + // Details specific to the task type. + oneof details { + // Details specific to assessment task types. + AssessmentOrchestrationResultDetails assessment_details = 1; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto new file mode 100644 index 0000000..89dac5e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that where encountered when +// processing a subtask. +message ErrorDetail { + // Optional. 
The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto new file mode 100644 index 0000000..ce60dd2 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. 
If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. + oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately +/-9.2x10^18. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately +/-10^(+/-300) and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto new file mode 100644 index 0000000..b5e9014 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto @@ -0,0 +1,248 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
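Subtask metrics are reported through the `TimeSeries` and `Point` messages defined above, and surface on `MigrationSubtask.metrics`. A small sketch of reading them from listed subtasks, assuming the import path from this staging package:

```ts
// Sketch only: import path assumed from this staging package.
import {v2alpha} from '@google-cloud/migration';

async function printSubtaskMetrics(parent: string): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  for await (const subtask of client.listMigrationSubtasksAsync({parent})) {
    for (const series of subtask.metrics ?? []) {
      // Points are returned in reverse time order, so the first entry is the latest.
      const latest = series.points?.[0];
      const value = latest?.value?.int64Value ?? latest?.value?.doubleValue;
      console.log(`${subtask.name} ${series.metric} (${series.valueType}): ${value}`);
    }
  }
}
```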
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2alpha/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. + rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. 
+ rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. 
+ // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto new file mode 100644 index 0000000..bf4b27e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto @@ -0,0 +1,207 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
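The `ListMigrationSubtasksRequest` message above supports paging, a field mask, and a `filter` such as `migration_task = "ab012"` to narrow results to one task. A sketch of the corresponding client call, using the async-iteration surface covered by the tests (import path assumed from this staging package):

```ts
// Sketch only: import path assumed from this staging package.
import {v2alpha} from '@google-cloud/migration';

async function listSubtasksForTask(workflowName: string, taskId: string): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  const iterable = client.listMigrationSubtasksAsync({
    parent: workflowName,                       // e.g. projects/123/locations/us/workflows/1234
    filter: `migration_task = "${taskId}"`,     // filter syntax from ListMigrationSubtasksRequest
    pageSize: 50,                               // the service may still return fewer per page
  });
  for await (const subtask of iterable) {
    console.log(subtask.name, subtask.state);
  }
}
```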
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Mapping between an input and output file to be translated in a subtask. +message TranslationFileMapping { + // The Cloud Storage path for a file to translation in a subtask. + string input_path = 1; + + // The Cloud Storage path to write back the corresponding input file to. + string output_path = 2; +} + +// The translation task config to capture necessary settings for a translation +// task and subtask. +message TranslationTaskDetails { + // The file encoding types. + enum FileEncoding { + // File encoding setting is not specified. + FILE_ENCODING_UNSPECIFIED = 0; + + // File encoding is UTF_8. + UTF_8 = 1; + + // File encoding is ISO_8859_1. + ISO_8859_1 = 2; + + // File encoding is US_ASCII. + US_ASCII = 3; + + // File encoding is UTF_16. + UTF_16 = 4; + + // File encoding is UTF_16LE. + UTF_16LE = 5; + + // File encoding is UTF_16BE. + UTF_16BE = 6; + } + + // The special token data type. + enum TokenType { + // Token type is not specified. + TOKEN_TYPE_UNSPECIFIED = 0; + + // Token type as string. + STRING = 1; + + // Token type as integer. + INT64 = 2; + + // Token type as numeric. + NUMERIC = 3; + + // Token type as boolean. + BOOL = 4; + + // Token type as float. + FLOAT64 = 5; + + // Token type as date. + DATE = 6; + + // Token type as timestamp. + TIMESTAMP = 7; + } + + // The language specific settings for the translation task. + oneof language_options { + // The Teradata SQL specific settings for the translation task. + TeradataOptions teradata_options = 10; + + // The BTEQ specific settings for the translation task. + BteqOptions bteq_options = 11; + } + + // The Cloud Storage path for translation input files. + string input_path = 1; + + // The Cloud Storage path for translation output files. + string output_path = 2; + + // Cloud Storage files to be processed for translation. + repeated TranslationFileMapping file_paths = 12; + + // The Cloud Storage path to DDL files as table schema to assist semantic + // translation. + string schema_path = 3; + + // The file encoding type. + FileEncoding file_encoding = 4; + + // The settings for SQL identifiers. + IdentifierSettings identifier_settings = 5; + + // The map capturing special tokens to be replaced during translation. The key + // is special token in string. The value is the token data type. This is used + // to translate SQL query template which contains special token as place + // holder. The special token makes a query invalid to parse. This map will be + // applied to annotate those special token with types to let parser understand + // how to parse them into proper structure with type information. + map special_token_map = 6; + + // The filter applied to translation details. + Filter filter = 7; + + // Specifies the exact name of the bigquery table ("dataset.table") to be used + // for surfacing raw translation errors. If the table does not exist, we will + // create it. If it already exists and the schema is the same, we will re-use. + // If the table exists and the schema is different, we will throw an error. 
+  string translation_exception_table = 13;
+}
+
+// The filter applied to fields of translation details.
+message Filter {
+  // The list of prefixes used to exclude processing for input files.
+  repeated string input_file_exclusion_prefixes = 1;
+}
+
+// Settings related to SQL identifiers.
+message IdentifierSettings {
+  // The identifier case type.
+  enum IdentifierCase {
+    // The identifier case is not specified.
+    IDENTIFIER_CASE_UNSPECIFIED = 0;
+
+    // Identifiers will keep their original case.
+    ORIGINAL = 1;
+
+    // Identifiers will be in upper case.
+    UPPER = 2;
+
+    // Identifiers will be in lower case.
+    LOWER = 3;
+  }
+
+  // The SQL identifier rewrite mode.
+  enum IdentifierRewriteMode {
+    // SQL Identifier rewrite mode is unspecified.
+    IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0;
+
+    // SQL identifiers won't be rewritten.
+    NONE = 1;
+
+    // All SQL identifiers will be rewritten.
+    REWRITE_ALL = 2;
+  }
+
+  // The setting to control output queries' identifier case.
+  IdentifierCase output_identifier_case = 1;
+
+  // Specifies the rewrite mode for SQL identifiers.
+  IdentifierRewriteMode identifier_rewrite_mode = 2;
+}
+
+// Teradata SQL specific translation task related settings.
+message TeradataOptions {
+
+}
+
+// BTEQ translation task related settings.
+message BteqOptions {
+  // Specifies the project and dataset in BigQuery that will be used for
+  // external table creation during the translation.
+  DatasetReference project_dataset = 1;
+
+  // The Cloud Storage location to be used as the default path for files that
+  // are not otherwise specified in the file replacement map.
+  string default_path_uri = 2;
+
+  // Maps the local paths that are used in BTEQ scripts (the keys) to the paths
+  // in Cloud Storage that should be used in their stead in the translation (the
+  // values).
+  map<string, string> file_replacement_map = 3;
+}
+
+// Reference to a BigQuery dataset.
+message DatasetReference {
+  // A unique ID for this dataset, without the project name. The ID
+  // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_).
+  // The maximum length is 1,024 characters.
+  string dataset_id = 1;
+
+  // The ID of the project containing this dataset.
+  string project_id = 2;
+}
diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js
new file mode 100644
index 0000000..4004520
--- /dev/null
+++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js
@@ -0,0 +1,64 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten.
** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. + */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..04adf0d --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js @@ -0,0 +1,59 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..8bf28e2 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js @@ -0,0 +1,63 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..bbc9e68 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js @@ -0,0 +1,63 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..61a50a0 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js @@ -0,0 +1,83 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..41d06f8 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js @@ -0,0 +1,77 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..236300c --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js @@ -0,0 +1,59 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json new file mode 100644 index 0000000..e8dd517 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2alpha", + "version": "v2alpha" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 56, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 69, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 75, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "service": { + 
"shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts new file mode 100644 index 0000000..288e629 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2alpha from './v2alpha'; +const MigrationServiceClient = v2alpha.MigrationServiceClient; +type MigrationServiceClient = v2alpha.MigrationServiceClient; +export {v2alpha, MigrationServiceClient}; +export default {v2alpha, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json new file mode 100644 index 0000000..f751ba9 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2alpha", + "libraryPackage": "@google-cloud/migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + 
"listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts new file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts new file mode 100644 index 0000000..50e1053 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts @@ -0,0 +1,1246 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; + +import { Transform } from 'stream'; +import { RequestType } from 'google-gax/build/src/apitypes'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2alpha/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
+ * @class + * @memberof v2alpha + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. 
+   * This function will be called automatically when any class method is called for the
+   * first time, but if you need to initialize it before calling an actual method,
+   * feel free to call initialize() directly.
+   *
+   * You can await on this method if you want to make sure the client is initialized.
+   *
+   * @returns {Promise} A promise that resolves to an authenticated service stub.
+   */
+  initialize() {
+    // If the client stub promise is already initialized, return immediately.
+    if (this.migrationServiceStub) {
+      return this.migrationServiceStub;
+    }
+
+    // Put together the "service stub" for
+    // google.cloud.bigquery.migration.v2alpha.MigrationService.
+    this.migrationServiceStub = this._gaxGrpc.createStub(
+        this._opts.fallback ?
+          (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') :
+          // eslint-disable-next-line @typescript-eslint/no-explicit-any
+          (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService,
+        this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>;
+
+    // Iterate over each of the methods that the service provides
+    // and create an API call method for each.
+    const migrationServiceStubMethods =
+        ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks'];
+    for (const methodName of migrationServiceStubMethods) {
+      const callPromise = this.migrationServiceStub.then(
+        stub => (...args: Array<{}>) => {
+          if (this._terminated) {
+            return Promise.reject('The client has already been closed.');
+          }
+          const func = stub[methodName];
+          return func.apply(stub, args);
+        },
+        (err: Error|null|undefined) => () => {
+          throw err;
+        });
+
+      const descriptor =
+        this.descriptors.page[methodName] ||
+        undefined;
+      const apiCall = this._gaxModule.createApiCall(
+        callPromise,
+        this._defaults[methodName],
+        descriptor
+      );
+
+      this.innerApiCalls[methodName] = apiCall;
+    }
+
+    return this.migrationServiceStub;
+  }
+
+  /**
+   * The DNS address for this API service.
+   * @returns {string} The DNS address for this service.
+   */
+  static get servicePath() {
+    return 'bigquerymigration.googleapis.com';
+  }
+
+  /**
+   * The DNS address for this API service - same as servicePath(),
+   * exists for compatibility reasons.
+   * @returns {string} The DNS address for this service.
+   */
+  static get apiEndpoint() {
+    return 'bigquerymigration.googleapis.com';
+  }
+
+  /**
+   * The port for this API service.
+   * @returns {number} The default port for this service.
+   */
+  static get port() {
+    return 443;
+  }
+
+  /**
+   * The scopes needed to make gRPC calls for every method defined
+   * in this service.
+   * @returns {string[]} List of default scopes.
+   */
+  static get scopes() {
+    return [
+      'https://www.googleapis.com/auth/cloud-platform'
+    ];
+  }
+
+  getProjectId(): Promise<string>;
+  getProjectId(callback: Callback<string, undefined, undefined>): void;
+  /**
+   * Return the project ID used by this class.
+   * @returns {Promise} A promise that resolves to string containing the project ID.
+   */
+  getProjectId(callback?: Callback<string, undefined, undefined>):
+      Promise<string>|void {
+    if (callback) {
+      this.auth.getProjectId(callback);
+      return;
+    }
+    return this.auth.getProjectId();
+  }
+
+  // -------------------
+  // -- Service calls --
+  // -------------------
+/**
+ * Creates a migration workflow.
+ *
+ * @param {Object} request
+ *   The request object that will be sent.
+ * @param {string} request.parent
+ *   Required.
The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. 
+ * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. 
The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. 
+ * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = 
options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. 
+ * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. 
+ * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. 
+ * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. 
`migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. 
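+   *
+   * Illustrative usage (all values below are hypothetical):
+   *
+   *   const name = client.migrationSubtaskPath('my-project', 'us', 'my-workflow', 'my-subtask');
+   *   // => 'projects/my-project/locations/us/workflows/my-workflow/subtasks/my-subtask'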
+ */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. 
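+   *
+   * Illustrative usage (the resource name below is hypothetical):
+   *
+   *   const workflow = client.matchWorkflowFromMigrationWorkflowName(
+   *     'projects/my-project/locations/us/workflows/my-workflow');
+   *   // => 'my-workflow'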
+ */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close(): Promise { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json new file mode 100644 index 0000000..2184b83 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json @@ -0,0 +1,73 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2alpha.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationSubtasks": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json new file mode 100644 index 0000000..8e91e42 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json @@ -0,0 +1,8 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", + 
"../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" +] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..aa2c893 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..0afe940 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts new file mode 100644 index 0000000..8ec4522 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import { packNTest } from 'pack-n-play'; +import { readFileSync } from 'fs'; +import { describe, it } from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts new file mode 100644 index 0000000..4b80fbb --- /dev/null +++ b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts @@ -0,0 +1,1254 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import { describe, it } from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? 
sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2alpha.MigrationServiceClient', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
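+      // Stub auth.getProjectId so the test resolves a fake project ID without real credentials.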
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
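+      // Initialize the client so its inner API call can be stubbed to reject below.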
client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + 
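+      // Confirm the stub received the request together with the expected routing-header options.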
assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + 
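+          // Invoke the callback overload and settle the promise from its result.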
client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 
'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + 
projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + 
client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: 
protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + 
request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + 
assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = 
client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2alpha/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2alpha/webpack.config.js 
b/owl-bot-staging/v2alpha/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2alpha/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; From fbe2227fd7cf2f9592a143a6b175aac80a9f9a91 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 9 Aug 2022 20:50:30 +0000 Subject: [PATCH 02/13] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- linkinator.config.json | 31 +- owl-bot-staging/v2/.eslintignore | 7 - owl-bot-staging/v2/.eslintrc.json | 3 - owl-bot-staging/v2/.gitignore | 14 - owl-bot-staging/v2/.jsdoc.js | 55 - owl-bot-staging/v2/.mocharc.js | 33 - owl-bot-staging/v2/.prettierrc.js | 22 - owl-bot-staging/v2/README.md | 1 - owl-bot-staging/v2/linkinator.config.json | 16 - owl-bot-staging/v2/package.json | 64 - .../migration/v2/migration_entities.proto | 233 --- .../v2/migration_error_details.proto | 62 - .../migration/v2/migration_metrics.proto | 111 -- .../migration/v2/migration_service.proto | 245 ---- .../migration/v2/translation_config.proto | 257 ---- ...ation_service.create_migration_workflow.js | 64 - ...ation_service.delete_migration_workflow.js | 59 - ...migration_service.get_migration_subtask.js | 63 - ...igration_service.get_migration_workflow.js | 63 - ...gration_service.list_migration_subtasks.js | 83 -- ...ration_service.list_migration_workflows.js | 77 - ...ration_service.start_migration_workflow.js | 59 - ...ta.google.cloud.bigquery.migration.v2.json | 335 ----- owl-bot-staging/v2/src/index.ts | 25 - owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 -- owl-bot-staging/v2/src/v2/index.ts | 19 - .../v2/src/v2/migration_service_client.ts | 1246 ---------------- .../v2/migration_service_client_config.json | 71 - .../src/v2/migration_service_proto_list.json | 7 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - 
owl-bot-staging/v2/system-test/install.ts | 49 - .../v2/test/gapic_migration_service_v2.ts | 1254 ----------------- owl-bot-staging/v2/tsconfig.json | 19 - owl-bot-staging/v2/webpack.config.js | 64 - owl-bot-staging/v2alpha/.eslintignore | 7 - owl-bot-staging/v2alpha/.eslintrc.json | 3 - owl-bot-staging/v2alpha/.gitignore | 14 - owl-bot-staging/v2alpha/.jsdoc.js | 55 - owl-bot-staging/v2alpha/.mocharc.js | 33 - owl-bot-staging/v2alpha/.prettierrc.js | 22 - owl-bot-staging/v2alpha/README.md | 1 - .../v2alpha/linkinator.config.json | 16 - owl-bot-staging/v2alpha/package.json | 64 - .../migration/v2alpha/assessment_task.proto | 49 - .../v2alpha/migration_entities.proto | 244 ---- .../v2alpha/migration_error_details.proto | 62 - .../migration/v2alpha/migration_metrics.proto | 111 -- .../migration/v2alpha/migration_service.proto | 248 ---- .../migration/v2alpha/translation_task.proto | 207 --- ...ation_service.create_migration_workflow.js | 64 - ...ation_service.delete_migration_workflow.js | 59 - ...migration_service.get_migration_subtask.js | 63 - ...igration_service.get_migration_workflow.js | 63 - ...gration_service.list_migration_subtasks.js | 83 -- ...ration_service.list_migration_workflows.js | 77 - ...ration_service.start_migration_workflow.js | 59 - ...ogle.cloud.bigquery.migration.v2alpha.json | 335 ----- owl-bot-staging/v2alpha/src/index.ts | 25 - .../v2alpha/src/v2alpha/gapic_metadata.json | 101 -- owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 - .../src/v2alpha/migration_service_client.ts | 1246 ---------------- .../migration_service_client_config.json | 73 - .../v2alpha/migration_service_proto_list.json | 8 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - .../v2alpha/system-test/install.ts | 49 - .../test/gapic_migration_service_v2alpha.ts | 1254 ----------------- owl-bot-staging/v2alpha/tsconfig.json | 19 - owl-bot-staging/v2alpha/webpack.config.js | 64 - .../migration/v2/migration_entities.proto | 2 +- .../migration/v2/translation_config.proto | 18 + protos/protos.d.ts | 272 +++- protos/protos.js | 586 +++++++- protos/protos.json | 26 +- system-test/fixtures/sample/src/index.js | 2 +- system-test/fixtures/sample/src/index.ts | 2 +- 77 files changed, 916 insertions(+), 9719 deletions(-) delete mode 100644 owl-bot-staging/v2/.eslintignore delete mode 100644 owl-bot-staging/v2/.eslintrc.json delete mode 100644 owl-bot-staging/v2/.gitignore delete mode 100644 owl-bot-staging/v2/.jsdoc.js delete mode 100644 owl-bot-staging/v2/.mocharc.js delete mode 100644 owl-bot-staging/v2/.prettierrc.js delete mode 100644 owl-bot-staging/v2/README.md delete mode 100644 owl-bot-staging/v2/linkinator.config.json delete mode 100644 owl-bot-staging/v2/package.json delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js delete mode 100644 
owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json delete mode 100644 owl-bot-staging/v2/src/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/src/v2/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2/system-test/install.ts delete mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts delete mode 100644 owl-bot-staging/v2/tsconfig.json delete mode 100644 owl-bot-staging/v2/webpack.config.js delete mode 100644 owl-bot-staging/v2alpha/.eslintignore delete mode 100644 owl-bot-staging/v2alpha/.eslintrc.json delete mode 100644 owl-bot-staging/v2alpha/.gitignore delete mode 100644 owl-bot-staging/v2alpha/.jsdoc.js delete mode 100644 owl-bot-staging/v2alpha/.mocharc.js delete mode 100644 owl-bot-staging/v2alpha/.prettierrc.js delete mode 100644 owl-bot-staging/v2alpha/README.md delete mode 100644 owl-bot-staging/v2alpha/linkinator.config.json delete mode 100644 owl-bot-staging/v2alpha/package.json delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json 
delete mode 100644 owl-bot-staging/v2alpha/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/system-test/install.ts delete mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts delete mode 100644 owl-bot-staging/v2alpha/tsconfig.json delete mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/linkinator.config.json b/linkinator.config.json index ec52446..befd23c 100644 --- a/linkinator.config.json +++ b/linkinator.config.json @@ -1,17 +1,16 @@ { - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://cloud.google.com/nodejs/docs/reference/bigquery-migration/latest", - "https://github.com/googleapis/nodejs-bigquery-migration/blob/master/CHANGELOG.md", - "https://github.com/googleapis/nodejs-bigquery-migration/blob/addSamples/CHANGELOG.md" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} \ No newline at end of file + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js deleted file mode 100644 index aabe555..0000000 --- a/owl-bot-staging/v2/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json deleted file mode 100644 index 6b38f7b..0000000 --- a/owl-bot-staging/v2/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.1.1" - }, - "devDependencies": { - "@types/mocha": "^9.1.0", - "@types/node": "^16.0.0", - "@types/sinon": "^10.0.8", - "c8": "^7.11.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.7", - "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^3.0.0", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^9.2.6", - "typescript": "^4.5.5", - "webpack": "^5.67.0", - "webpack-cli": "^4.9.1" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto deleted file mode 100644 index 7d77bae..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2/translation_config.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. - // The ID is server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. - FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Batch SQL Translation. 
- TranslationConfigDetails translation_config_details = 14; - } - - // Output only. Immutable. The unique identifier for the migration task. The - // ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be one of the supported task types: - // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, - // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, - // Translation_Snowflake2BQ, Translation_Netezza2BQ, - // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, - // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. - string type = 2; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 5 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID - // is server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 6 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Provides details to errors and issues encountered while - // processing the subtask. Presence of error details does not mean that the - // subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. 
Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto deleted file mode 100644 index 199e2db..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. 
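For orientation, here is a small sketch, not part of the generated patch, of how a caller might walk a subtask's resource error details as defined by the messages above; it assumes the camelCase field names produced by the Node.js proto mapping, and the helper name is hypothetical.

// Illustrative only: log the structured errors attached to a migration subtask.
function logSubtaskErrors(subtask) {
  for (const detail of subtask.resourceErrorDetails ?? []) {
    const errors = detail.errorDetails ?? [];
    console.log(`resource: ${detail.resourceInfo?.resourceName}`);
    for (const err of errors) {
      // A zero line/column means no location information was available.
      const line = err.location?.line ?? 0;
      const column = err.location?.column ?? 0;
      console.log(`  ${err.errorInfo?.reason} at ${line}:${column}`);
    }
    if (detail.errorCount > errors.length) {
      console.log(`  (truncated: ${detail.errorCount} errors in total)`);
    }
  }
}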
- int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto deleted file mode 100644 index e52fead..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. 
For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately `+/-10^(+/-300)` and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto deleted file mode 100644 index 3c1a89e..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_entities.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Service to handle EDW migrations. 
-service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. - rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. 
- google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. - // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. 
- // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto deleted file mode 100644 index 994140d..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationConfigProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The translation config to capture necessary settings for a translation task -// and subtask. -message TranslationConfigDetails { - // The chosen path where the source for input files will be found. - oneof source_location { - // The Cloud Storage path for a directory of files to translate in a task. - string gcs_source_path = 1; - } - - // The chosen path where the destination for output files will be found. - oneof target_location { - // The Cloud Storage path to write back the corresponding input files to. - string gcs_target_path = 2; - } - - // The dialect of the input files. - Dialect source_dialect = 3; - - // The target dialect for the engine to translate the input to. - Dialect target_dialect = 4; - - // The mapping of full SQL object names from their current state to the - // desired output. - oneof output_name_mapping { - // The mapping of objects to their desired output names in list form. - ObjectNameMappingList name_mapping_list = 5; - } - - // The default source environment values for the translation. - SourceEnv source_env = 6; -} - -// The possible dialect options for translation. -message Dialect { - // The possible dialect options that this message represents. 
- oneof dialect_value { - // The BigQuery dialect - BigQueryDialect bigquery_dialect = 1; - - // The HiveQL dialect - HiveQLDialect hiveql_dialect = 2; - - // The Redshift dialect - RedshiftDialect redshift_dialect = 3; - - // The Teradata dialect - TeradataDialect teradata_dialect = 4; - - // The Oracle dialect - OracleDialect oracle_dialect = 5; - - // The SparkSQL dialect - SparkSQLDialect sparksql_dialect = 6; - - // The Snowflake dialect - SnowflakeDialect snowflake_dialect = 7; - - // The Netezza dialect - NetezzaDialect netezza_dialect = 8; - - // The Azure Synapse dialect - AzureSynapseDialect azure_synapse_dialect = 9; - - // The Vertica dialect - VerticaDialect vertica_dialect = 10; - - // The SQL Server dialect - SQLServerDialect sql_server_dialect = 11; - - // The Postgresql dialect - PostgresqlDialect postgresql_dialect = 12; - - // The Presto dialect - PrestoDialect presto_dialect = 13; - - // The MySQL dialect - MySQLDialect mysql_dialect = 14; - } -} - -// The dialect definition for BigQuery. -message BigQueryDialect {} - -// The dialect definition for HiveQL. -message HiveQLDialect {} - -// The dialect definition for Redshift. -message RedshiftDialect {} - -// The dialect definition for Teradata. -message TeradataDialect { - // The sub-dialect options for Teradata. - enum Mode { - // Unspecified mode. - MODE_UNSPECIFIED = 0; - - // Teradata SQL mode. - SQL = 1; - - // BTEQ mode (which includes SQL). - BTEQ = 2; - } - - // Which Teradata sub-dialect mode the user specifies. - Mode mode = 1; -} - -// The dialect definition for Oracle. -message OracleDialect {} - -// The dialect definition for SparkSQL. -message SparkSQLDialect {} - -// The dialect definition for Snowflake. -message SnowflakeDialect {} - -// The dialect definition for Netezza. -message NetezzaDialect {} - -// The dialect definition for Azure Synapse. -message AzureSynapseDialect {} - -// The dialect definition for Vertica. -message VerticaDialect {} - -// The dialect definition for SQL Server. -message SQLServerDialect {} - -// The dialect definition for Postgresql. -message PostgresqlDialect {} - -// The dialect definition for Presto. -message PrestoDialect {} - -// The dialect definition for MySQL. -message MySQLDialect {} - -// Represents a map of name mappings using a list of key:value proto messages of -// existing name to desired output name. -message ObjectNameMappingList { - // The elements of the object name map. - repeated ObjectNameMapping name_map = 1; -} - -// Represents a key-value pair of NameMappingKey to NameMappingValue to -// represent the mapping of SQL names from the input value to desired output. -message ObjectNameMapping { - // The name of the object in source that is being mapped. - NameMappingKey source = 1; - - // The desired target name of the object that is being mapped. - NameMappingValue target = 2; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the source data warehouse. -message NameMappingKey { - // The type of the object that is being mapped. - enum Type { - // Unspecified name mapping type. - TYPE_UNSPECIFIED = 0; - - // The object being mapped is a database. - DATABASE = 1; - - // The object being mapped is a schema. - SCHEMA = 2; - - // The object being mapped is a relation. - RELATION = 3; - - // The object being mapped is an attribute. - ATTRIBUTE = 4; - - // The object being mapped is a relation alias. - RELATION_ALIAS = 5; - - // The object being mapped is a an attribute alias. 
- ATTRIBUTE_ALIAS = 6; - - // The object being mapped is a function. - FUNCTION = 7; - } - - // The type of object that is being mapped. - Type type = 1; - - // The database name (BigQuery project ID equivalent in the source data - // warehouse). - string database = 2; - - // The schema name (BigQuery dataset equivalent in the source data warehouse). - string schema = 3; - - // The relation name (BigQuery table or view equivalent in the source data - // warehouse). - string relation = 4; - - // The attribute name (BigQuery column equivalent in the source data - // warehouse). - string attribute = 5; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the target data warehouse. -message NameMappingValue { - // The database name (BigQuery project ID equivalent in the target data - // warehouse). - string database = 1; - - // The schema name (BigQuery dataset equivalent in the target data warehouse). - string schema = 2; - - // The relation name (BigQuery table or view equivalent in the target data - // warehouse). - string relation = 3; - - // The attribute name (BigQuery column equivalent in the target data - // warehouse). - string attribute = 4; -} - -// Represents the default source environment values for the translation. -message SourceEnv { - // The default database name to fully qualify SQL objects when their database - // name is missing. - string default_database = 1; - - // The schema search path. When SQL objects are missing schema name, - // translation engine will search through this list to find the value. - repeated string schema_search_path = 2; -} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js deleted file mode 100644 index 9aed895..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. 
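As a concrete illustration of the `migrationWorkflow` object this sample asks the developer to supply, the following hedged sketch, kept in the sample's commented-out style, wires a single task to the newly added MySQL dialect; the display name, map key, and Cloud Storage paths are placeholders, not values taken from this patch.

// For example (illustrative only):
// const migrationWorkflow = {
//   displayName: 'mysql-to-bq-example',
//   tasks: {
//     // The map key is an arbitrary label used only to address the task.
//     'translate-mysql': {
//       type: 'Translation_MySQL2BQ',
//       translationConfigDetails: {
//         gcsSourcePath: 'gs://example-bucket/input',
//         gcsTargetPath: 'gs://example-bucket/output',
//         sourceDialect: {mysqlDialect: {}},
//         targetDialect: {bigqueryDialect: {}},
//       },
//     },
//   },
// };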
- */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js deleted file mode 100644 index aeda293..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js deleted file mode 100644 index df6f7d2..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js deleted file mode 100644 index 72f7565..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js deleted file mode 100644 index 95e5833..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
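A hedged illustration of how the optional variables described in this sample might be populated; the field paths follow the usual google.protobuf.FieldMask convention of snake_case proto field names, and the task ID comes from the example given in the proto comment.

// For example (illustrative only):
// const readMask = {paths: ['state', 'processing_error']};
// const pageSize = 50;
// const pageToken = '';                       // empty on the first call
// const filter = 'migration_task = "ab012"';  // "ab012" is a placeholder task ID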
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js deleted file mode 100644 index 63402d4..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js deleted file mode 100644 index 76c614c..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json deleted file mode 100644 index c5cd62c..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2", - "version": "v2" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 56, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 55, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 69, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 55, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 75, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "service": { - "shortName": "MigrationService", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts deleted file mode 100644 index 35a8fd9..0000000 --- a/owl-bot-staging/v2/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2 from './v2'; -const MigrationServiceClient = v2.MigrationServiceClient; -type MigrationServiceClient = v2.MigrationServiceClient; -export {v2, MigrationServiceClient}; -export default {v2, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json deleted file mode 100644 index e0aa12f..0000000 --- a/owl-bot-staging/v2/src/v2/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2", - "libraryPackage": "@google-cloud/migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/index.ts b/owl-bot-staging/v2/src/v2/index.ts 
deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2/src/v2/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts deleted file mode 100644 index 20f6e71..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client.ts +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; - -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; - -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2 - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
- if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. - this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. 
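// Illustrative sketch of constructing and eagerly initializing the client
// deleted in this patch. The package name '@google-cloud/migration' is the
// one used by the gapic_metadata.json and system-test fixtures in this diff;
// the projectId value is a placeholder.
import {MigrationServiceClient} from '@google-cloud/migration';

async function initClient(): Promise<MigrationServiceClient> {
  const client = new MigrationServiceClient({projectId: 'my-project'});
  // initialize() is optional: the first RPC triggers it lazily, but awaiting
  // it up front surfaces authentication problems early.
  await client.initialize();
  return client;
}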
- * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. 
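// Sketch of calling createMigrationWorkflow (implemented above). The parent
// is built with the client's locationPath() helper; the workflow body is a
// hypothetical minimal object and displayName is an assumed field name, not
// taken from this patch.
import {MigrationServiceClient} from '@google-cloud/migration';

async function createWorkflow(client: MigrationServiceClient) {
  const parent = client.locationPath('my-project', 'us');
  const [workflow] = await client.createMigrationWorkflow({
    parent,
    migrationWorkflow: {displayName: 'example workflow'}, // assumed field
  });
  return workflow;
}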
- * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. 
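// Sketch of fetching a workflow with getMigrationWorkflow (implemented
// above), using the client's migrationWorkflowPath() helper instead of a
// hand-formatted resource name. Values are placeholders.
import {MigrationServiceClient} from '@google-cloud/migration';

async function getWorkflow(client: MigrationServiceClient) {
  const name = client.migrationWorkflowPath('my-project', 'us', '1234');
  const [workflow] = await client.getMigrationWorkflow({name});
  return workflow;
}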
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
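// Sketch of deleting a workflow by name with deleteMigrationWorkflow
// (implemented above); per the JSDoc it resolves to google.protobuf.Empty,
// so the result can be ignored. The name format follows the JSDoc example.
import {MigrationServiceClient} from '@google-cloud/migration';

async function deleteWorkflow(client: MigrationServiceClient, name: string) {
  await client.deleteMigrationWorkflow({name});
}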
- * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
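// Sketch of starting a DRAFT workflow with startMigrationWorkflow
// (implemented above); as the JSDoc notes, this is a no-op if the workflow
// is already RUNNING. The resource name is the example from the JSDoc.
import {MigrationServiceClient} from '@google-cloud/migration';

async function startWorkflow(client: MigrationServiceClient) {
  const name = 'projects/123/locations/us/workflows/1234';
  await client.startMigrationWorkflow({name});
}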
- * @example include:samples/generated/v2/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
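// Sketch of reading a single subtask with getMigrationSubtask (implemented
// above), optionally narrowing the response with readMask. The field path
// 'state' is an assumed example, not taken from this patch.
import {MigrationServiceClient} from '@google-cloud/migration';

async function getSubtask(client: MigrationServiceClient) {
  const name = client.migrationSubtaskPath('my-project', 'us', '1234', '543');
  const [subtask] = await client.getMigrationSubtask({
    name,
    readMask: {paths: ['state']}, // assumed field path
  });
  return subtask;
}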
- * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
- * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
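// Sketch of async iteration with listMigrationWorkflowsAsync (implemented
// above): pages are fetched on demand, and the loop can stop early without
// requesting the remaining pages. Parent value is the JSDoc example.
import {MigrationServiceClient} from '@google-cloud/migration';

async function listWorkflows(client: MigrationServiceClient) {
  const parent = 'projects/123/locations/us';
  for await (const workflow of client.listMigrationWorkflowsAsync({parent, pageSize: 50})) {
    console.log(workflow.name);
    break; // stop after the first result; no further pages are requested
  }
}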
- */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. 
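// Sketch of listing the subtasks of one workflow with listMigrationSubtasks
// (implemented above), restricted to a single task via the filter syntax
// shown in the JSDoc. Values are placeholders.
import {MigrationServiceClient} from '@google-cloud/migration';

async function listSubtasks(client: MigrationServiceClient) {
  const parent = 'projects/123/locations/us/workflows/1234';
  const [subtasks] = await client.listMigrationSubtasks({
    parent,
    filter: 'migration_task = "ab012"', // task ID, as documented above
  });
  return subtasks;
}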
- * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. 
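// Sketch of consuming the paged results as a Node stream via
// listMigrationSubtasksStream (implemented above); per the JSDoc, a 'data'
// event fires once per subtask.
import {MigrationServiceClient} from '@google-cloud/migration';

function streamSubtasks(client: MigrationServiceClient, parent: string) {
  client.listMigrationSubtasksStream({parent})
    .on('data', (subtask) => console.log(subtask.name))
    .on('error', console.error)
    .on('end', () => console.log('all subtasks received'));
}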
The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. - */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. 
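// Sketch of the path-template helpers in this section: render a subtask
// resource name from its parts, then parse a component back out. Values are
// placeholders.
import {MigrationServiceClient} from '@google-cloud/migration';

function pathHelpers(client: MigrationServiceClient) {
  const subtaskName = client.migrationSubtaskPath('my-project', 'us', '1234', '543');
  // -> 'projects/my-project/locations/us/workflows/1234/subtasks/543'
  const workflow = client.matchWorkflowFromMigrationSubtaskName(subtaskName); // '1234'
  return {subtaskName, workflow};
}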
- */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
- */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json deleted file mode 100644 index 5832815..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client_config.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "ListMigrationSubtasks": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json deleted file mode 100644 index 57df7ab..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" -] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js deleted file mode 100644 index aa2c893..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 0afe940..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts deleted file mode 100644 index 8ec4522..0000000 --- a/owl-bot-staging/v2/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import { packNTest } from 'pack-n-play'; -import { readFileSync } from 'fs'; -import { describe, it } from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts deleted file mode 100644 index eb05cd6..0000000 --- a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts +++ /dev/null @@ -1,1254 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2.MigrationServiceClient', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - 
await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = 
''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - 
credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - 
assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - 
client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - 
assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', 
private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - 
}); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); - 
const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - 
expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - 
.getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2alpha/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2alpha/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2alpha/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js deleted file mode 100644 index aabe555..0000000 --- a/owl-bot-staging/v2alpha/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2alpha/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2alpha/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2alpha/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2alpha/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json deleted file mode 100644 index 6b38f7b..0000000 --- a/owl-bot-staging/v2alpha/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.1.1" - }, - "devDependencies": { - "@types/mocha": "^9.1.0", - "@types/node": "^16.0.0", - "@types/sinon": "^10.0.8", - "c8": "^7.11.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.7", - "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^3.0.0", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^9.2.6", - "typescript": "^4.5.5", - "webpack": "^5.67.0", - "webpack-cli": "^4.9.1" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto deleted file mode 100644 index 0c6ea13..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "AssessmentTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Assessment task config. -message AssessmentTaskDetails { - // Required. The Cloud Storage path for assessment input files. - string input_path = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The BigQuery dataset for output. - string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. An optional Cloud Storage path to write the query logs (which is - // then used as an input path on the translation task) - string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) - // from which the input data is extracted. - string data_source = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// Details for an assessment task orchestration result. -message AssessmentOrchestrationResultDetails { - // Optional. The version used for the output table schemas. - string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto deleted file mode 100644 index 50d4c75..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. 
- FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Assessment. - AssessmentTaskDetails assessment_task_details = 12; - - // Task configuration for Batch/Offline SQL Translation. - TranslationTaskDetails translation_task_details = 13; - } - - // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be a supported task type. - string type = 2; - - // DEPRECATED! Use one of the task_details below. - // The details of the task. The type URL must be one of the supported task - // details messages and correspond to the Task's type. - google.protobuf.Any details = 3; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; - - // Output only. Additional information about the orchestration. - MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. 
Provides details to errors and issues encountered while processing the - // subtask. Presence of error details does not mean that the subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} - -// Additional information from the orchestrator when it is done with the -// task orchestration. -message MigrationTaskOrchestrationResult { - // Details specific to the task type. - oneof details { - // Details specific to assessment task types. - AssessmentOrchestrationResultDetails assessment_details = 1; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto deleted file mode 100644 index 89dac5e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. 
The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. - int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto deleted file mode 100644 index ce60dd2..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. 
If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately +/-9.2x10^18. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately +/-10^(+/-300) and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto deleted file mode 100644 index b5e9014..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto +++ /dev/null @@ -1,248 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Service to handle EDW migrations. -service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2alpha/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. 
- rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. 
- // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto deleted file mode 100644 index bf4b27e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Mapping between an input and output file to be translated in a subtask. -message TranslationFileMapping { - // The Cloud Storage path for a file to translation in a subtask. - string input_path = 1; - - // The Cloud Storage path to write back the corresponding input file to. - string output_path = 2; -} - -// The translation task config to capture necessary settings for a translation -// task and subtask. -message TranslationTaskDetails { - // The file encoding types. - enum FileEncoding { - // File encoding setting is not specified. - FILE_ENCODING_UNSPECIFIED = 0; - - // File encoding is UTF_8. - UTF_8 = 1; - - // File encoding is ISO_8859_1. - ISO_8859_1 = 2; - - // File encoding is US_ASCII. - US_ASCII = 3; - - // File encoding is UTF_16. - UTF_16 = 4; - - // File encoding is UTF_16LE. - UTF_16LE = 5; - - // File encoding is UTF_16BE. - UTF_16BE = 6; - } - - // The special token data type. - enum TokenType { - // Token type is not specified. - TOKEN_TYPE_UNSPECIFIED = 0; - - // Token type as string. - STRING = 1; - - // Token type as integer. - INT64 = 2; - - // Token type as numeric. - NUMERIC = 3; - - // Token type as boolean. - BOOL = 4; - - // Token type as float. - FLOAT64 = 5; - - // Token type as date. - DATE = 6; - - // Token type as timestamp. - TIMESTAMP = 7; - } - - // The language specific settings for the translation task. - oneof language_options { - // The Teradata SQL specific settings for the translation task. - TeradataOptions teradata_options = 10; - - // The BTEQ specific settings for the translation task. - BteqOptions bteq_options = 11; - } - - // The Cloud Storage path for translation input files. - string input_path = 1; - - // The Cloud Storage path for translation output files. - string output_path = 2; - - // Cloud Storage files to be processed for translation. - repeated TranslationFileMapping file_paths = 12; - - // The Cloud Storage path to DDL files as table schema to assist semantic - // translation. - string schema_path = 3; - - // The file encoding type. - FileEncoding file_encoding = 4; - - // The settings for SQL identifiers. - IdentifierSettings identifier_settings = 5; - - // The map capturing special tokens to be replaced during translation. The key - // is special token in string. The value is the token data type. This is used - // to translate SQL query template which contains special token as place - // holder. The special token makes a query invalid to parse. This map will be - // applied to annotate those special token with types to let parser understand - // how to parse them into proper structure with type information. - map special_token_map = 6; - - // The filter applied to translation details. - Filter filter = 7; - - // Specifies the exact name of the bigquery table ("dataset.table") to be used - // for surfacing raw translation errors. If the table does not exist, we will - // create it. If it already exists and the schema is the same, we will re-use. - // If the table exists and the schema is different, we will throw an error. 
- string translation_exception_table = 13; -} - -// The filter applied to fields of translation details. -message Filter { - // The list of prefixes used to exclude processing for input files. - repeated string input_file_exclusion_prefixes = 1; -} - -// Settings related to SQL identifiers. -message IdentifierSettings { - // The identifier case type. - enum IdentifierCase { - // The identifier case is not specified. - IDENTIFIER_CASE_UNSPECIFIED = 0; - - // Identifiers' cases will be kept as the original cases. - ORIGINAL = 1; - - // Identifiers will be in upper cases. - UPPER = 2; - - // Identifiers will be in lower cases. - LOWER = 3; - } - - // The SQL identifier rewrite mode. - enum IdentifierRewriteMode { - // SQL Identifier rewrite mode is unspecified. - IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; - - // SQL identifiers won't be rewrite. - NONE = 1; - - // All SQL identifiers will be rewrite. - REWRITE_ALL = 2; - } - - // The setting to control output queries' identifier case. - IdentifierCase output_identifier_case = 1; - - // Specifies the rewrite mode for SQL identifiers. - IdentifierRewriteMode identifier_rewrite_mode = 2; -} - -// Teradata SQL specific translation task related settings. -message TeradataOptions { - -} - -// BTEQ translation task related settings. -message BteqOptions { - // Specifies the project and dataset in BigQuery that will be used for - // external table creation during the translation. - DatasetReference project_dataset = 1; - - // The Cloud Storage location to be used as the default path for files that - // are not otherwise specified in the file replacement map. - string default_path_uri = 2; - - // Maps the local paths that are used in BTEQ scripts (the keys) to the paths - // in Cloud Storage that should be used in their stead in the translation (the - // value). - map file_replacement_map = 3; -} - -// Reference to a BigQuery dataset. -message DatasetReference { - // A unique ID for this dataset, without the project name. The ID - // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). - // The maximum length is 1,024 characters. - string dataset_id = 1; - - // The ID of the project containing this dataset. - string project_id = 2; -} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js deleted file mode 100644 index 4004520..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. - */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js deleted file mode 100644 index 04adf0d..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js deleted file mode 100644 index 8bf28e2..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js deleted file mode 100644 index bbc9e68..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js deleted file mode 100644 index 61a50a0..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js deleted file mode 100644 index 41d06f8..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js deleted file mode 100644 index 236300c..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json deleted file mode 100644 index e8dd517..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2alpha", - "version": "v2alpha" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 56, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 55, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 69, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 55, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 75, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "service": { - 
"shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts deleted file mode 100644 index 288e629..0000000 --- a/owl-bot-staging/v2alpha/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2alpha from './v2alpha'; -const MigrationServiceClient = v2alpha.MigrationServiceClient; -type MigrationServiceClient = v2alpha.MigrationServiceClient; -export {v2alpha, MigrationServiceClient}; -export default {v2alpha, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json deleted file mode 100644 index f751ba9..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2alpha", - "libraryPackage": "@google-cloud/migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - 
"listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts deleted file mode 100644 index 50e1053..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; - -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2alpha/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; - -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2alpha - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
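The constructor JSDoc above lists the supported client options (credentials, keyFilename, projectId, apiEndpoint, port, clientConfig, and the `fallback: 'rest'` HTTP/1.1 mode). A hedged sketch of passing a few of them; the key path and endpoint values are placeholders, not values taken from this patch:

```ts
import {v2alpha} from '@google-cloud/migration';

// Illustrative options only; keyFilename and apiEndpoint are placeholders.
const client = new v2alpha.MigrationServiceClient({
  // Path to a service-account JSON key; the project ID is read from the key file.
  keyFilename: '/path/to/service-account.json',
  // Override the default bigquerymigration.googleapis.com endpoint if needed.
  apiEndpoint: 'bigquerymigration.googleapis.com',
  // Use the HTTP/1.1 REST transport instead of gRPC.
  fallback: 'rest',
});
```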
- if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. - this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. 
- * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2alpha.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. 
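Per the createMigrationWorkflow documentation above, the call takes a required `parent` in the form `projects/{project}/locations/{location}` plus a `migrationWorkflow`, and the returned promise resolves to an array whose first element is the created MigrationWorkflow. A sketch under those assumptions; the parent value is the documented example and the `displayName` field is assumed from the MigrationWorkflow message:

```ts
import {v2alpha} from '@google-cloud/migration';

async function createWorkflow(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();

  // Placeholder parent; format documented as projects/{project}/locations/{location}.
  const parent = 'projects/foo/locations/bar';

  // The promise resolves to [MigrationWorkflow, request, rawResponse];
  // only the first element is usually needed.
  const [workflow] = await client.createMigrationWorkflow({
    parent,
    migrationWorkflow: {displayName: 'my-workflow'}, // assumed field; adjust to your proto
  });
  console.log(workflow.name);
}
```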
- * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. 
The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
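The deleteMigrationWorkflow and startMigrationWorkflow docs above both take only the workflow `name` and resolve to Empty; start is documented as the DRAFT to RUNNING transition and is a no-op when already RUNNING. A sketch using the example identifier from the docs rather than a real resource:

```ts
import {v2alpha} from '@google-cloud/migration';

async function startThenDelete(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();

  // Example identifier from the docs; replace with a real workflow name.
  const name = 'projects/123/locations/us/workflows/1234';

  // DRAFT -> RUNNING; a no-op if the workflow is already RUNNING.
  await client.startMigrationWorkflow({name});

  // Sibling call shown for contrast; both resolve to google.protobuf.Empty.
  await client.deleteMigrationWorkflow({name});
}
```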
- * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. 
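getMigrationSubtask, documented just above, takes the subtask resource name plus an optional readMask and resolves to the MigrationSubtask as the first array element. A short sketch using the example name from the docs and omitting the optional readMask:

```ts
import {v2alpha} from '@google-cloud/migration';

async function getSubtask(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();

  // Example identifier from the docs; readMask is optional and omitted here.
  const [subtask] = await client.getMigrationSubtask({
    name: 'projects/123/locations/us/workflows/1234/subtasks/543',
  });
  console.log(subtask);
}
```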
- * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = 
options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. 
- * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. 
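The three listMigrationWorkflows surfaces above (the auto-paginating call, the `...Stream` variant, and the `...Async` variant) differ only in how pages are consumed, and the docs recommend the async-iterable form so iteration can stop as soon as enough results have been seen. A sketch of that form, using the documented example parent:

```ts
import {v2alpha} from '@google-cloud/migration';

async function listWorkflows(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();

  // Pages are fetched lazily as the iterable is consumed; pageSize is optional.
  const iterable = client.listMigrationWorkflowsAsync({
    parent: 'projects/123/locations/us', // example value from the docs
    pageSize: 50,
  });

  for await (const workflow of iterable) {
    console.log(workflow.name);
    // Breaking out of the loop here stops further page requests.
  }
}
```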
- * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. 
- * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. 
`migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. 
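A minimal sketch of how the paginated surface documented above might be consumed, assuming the `@google-cloud/migration` entry point used by the staging fixtures and a hypothetical workflow parent; the async iterator issues one API call per page and can be stopped at any time.

import {MigrationServiceClient} from '@google-cloud/migration';

async function listSubtasks() {
  // Assumes application-default credentials; the parent value below is hypothetical.
  const client = new MigrationServiceClient();
  const parent = 'projects/123/locations/us/workflows/1234';
  // listMigrationSubtasksAsync pages through results under the hood, once per page.
  for await (const subtask of client.listMigrationSubtasksAsync({parent})) {
    console.log(subtask.name);
  }
}

listSubtasks().catch(console.error);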
- */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. 
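A short sketch of how the resource-name helpers above fit together; all identifiers are placeholders, and `client` is the same `MigrationServiceClient` instance as in the earlier sketch.

// Build a fully-qualified subtask name from its components...
const subtaskName = client.migrationSubtaskPath('my-project', 'us', 'wf-1234', 'st-5678');
// ...and parse components back out of a name returned by the API.
const workflowId = client.matchWorkflowFromMigrationSubtaskName(subtaskName);
const subtaskId = client.matchSubtaskFromMigrationSubtaskName(subtaskName);
console.log(workflowId, subtaskId); // 'wf-1234' 'st-5678'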
- */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. - */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json deleted file mode 100644 index 2184b83..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2alpha.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationSubtasks": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json deleted file mode 100644 index 8e91e42..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", - 
"../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" -] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js deleted file mode 100644 index aa2c893..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 0afe940..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts deleted file mode 100644 index 8ec4522..0000000 --- a/owl-bot-staging/v2alpha/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import { packNTest } from 'pack-n-play'; -import { readFileSync } from 'fs'; -import { describe, it } from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts deleted file mode 100644 index 4b80fbb..0000000 --- a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts +++ /dev/null @@ -1,1254 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? 
sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2alpha.MigrationServiceClient', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - 
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - 
client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - 
assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - 
client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 
'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - 
projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - 
client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: 
protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - 
request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - 
assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - 
credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = 
client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2alpha/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2alpha/webpack.config.js 
b/owl-bot-staging/v2alpha/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2alpha/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/protos/google/cloud/bigquery/migration/v2/migration_entities.proto index 79ca05b..7d77bae 100644 --- a/protos/google/cloud/bigquery/migration/v2/migration_entities.proto +++ b/protos/google/cloud/bigquery/migration/v2/migration_entities.proto @@ -135,7 +135,7 @@ message MigrationTask { // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, // Translation_Snowflake2BQ, Translation_Netezza2BQ, // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, - // Translation_SQLServer2BQ. + // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. string type = 2; // Output only. The current state of the task. diff --git a/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/protos/google/cloud/bigquery/migration/v2/translation_config.proto index 3ff2902..994140d 100644 --- a/protos/google/cloud/bigquery/migration/v2/translation_config.proto +++ b/protos/google/cloud/bigquery/migration/v2/translation_config.proto @@ -91,6 +91,15 @@ message Dialect { // The SQL Server dialect SQLServerDialect sql_server_dialect = 11; + + // The Postgresql dialect + PostgresqlDialect postgresql_dialect = 12; + + // The Presto dialect + PrestoDialect presto_dialect = 13; + + // The MySQL dialect + MySQLDialect mysql_dialect = 14; } } @@ -142,6 +151,15 @@ message VerticaDialect {} // The dialect definition for SQL Server. message SQLServerDialect {} +// The dialect definition for Postgresql. +message PostgresqlDialect {} + +// The dialect definition for Presto. +message PrestoDialect {} + +// The dialect definition for MySQL. +message MySQLDialect {} + // Represents a map of name mappings using a list of key:value proto messages of // existing name to desired output name. 
message ObjectNameMappingList { diff --git a/protos/protos.d.ts b/protos/protos.d.ts index 0be7f89..1563ada 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -1336,6 +1336,15 @@ export namespace google { /** Dialect sqlServerDialect */ sqlServerDialect?: (google.cloud.bigquery.migration.v2.ISQLServerDialect|null); + + /** Dialect postgresqlDialect */ + postgresqlDialect?: (google.cloud.bigquery.migration.v2.IPostgresqlDialect|null); + + /** Dialect prestoDialect */ + prestoDialect?: (google.cloud.bigquery.migration.v2.IPrestoDialect|null); + + /** Dialect mysqlDialect */ + mysqlDialect?: (google.cloud.bigquery.migration.v2.IMySQLDialect|null); } /** Represents a Dialect. */ @@ -1380,8 +1389,17 @@ export namespace google { /** Dialect sqlServerDialect. */ public sqlServerDialect?: (google.cloud.bigquery.migration.v2.ISQLServerDialect|null); + /** Dialect postgresqlDialect. */ + public postgresqlDialect?: (google.cloud.bigquery.migration.v2.IPostgresqlDialect|null); + + /** Dialect prestoDialect. */ + public prestoDialect?: (google.cloud.bigquery.migration.v2.IPrestoDialect|null); + + /** Dialect mysqlDialect. */ + public mysqlDialect?: (google.cloud.bigquery.migration.v2.IMySQLDialect|null); + /** Dialect dialectValue. */ - public dialectValue?: ("bigqueryDialect"|"hiveqlDialect"|"redshiftDialect"|"teradataDialect"|"oracleDialect"|"sparksqlDialect"|"snowflakeDialect"|"netezzaDialect"|"azureSynapseDialect"|"verticaDialect"|"sqlServerDialect"); + public dialectValue?: ("bigqueryDialect"|"hiveqlDialect"|"redshiftDialect"|"teradataDialect"|"oracleDialect"|"sparksqlDialect"|"snowflakeDialect"|"netezzaDialect"|"azureSynapseDialect"|"verticaDialect"|"sqlServerDialect"|"postgresqlDialect"|"prestoDialect"|"mysqlDialect"); /** * Creates a new Dialect instance using the specified properties. @@ -2394,6 +2412,258 @@ export namespace google { public toJSON(): { [k: string]: any }; } + /** Properties of a PostgresqlDialect. */ + interface IPostgresqlDialect { + } + + /** Represents a PostgresqlDialect. */ + class PostgresqlDialect implements IPostgresqlDialect { + + /** + * Constructs a new PostgresqlDialect. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.migration.v2.IPostgresqlDialect); + + /** + * Creates a new PostgresqlDialect instance using the specified properties. + * @param [properties] Properties to set + * @returns PostgresqlDialect instance + */ + public static create(properties?: google.cloud.bigquery.migration.v2.IPostgresqlDialect): google.cloud.bigquery.migration.v2.PostgresqlDialect; + + /** + * Encodes the specified PostgresqlDialect message. Does not implicitly {@link google.cloud.bigquery.migration.v2.PostgresqlDialect.verify|verify} messages. + * @param message PostgresqlDialect message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.migration.v2.IPostgresqlDialect, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified PostgresqlDialect message, length delimited. Does not implicitly {@link google.cloud.bigquery.migration.v2.PostgresqlDialect.verify|verify} messages. 
+ * @param message PostgresqlDialect message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.migration.v2.IPostgresqlDialect, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a PostgresqlDialect message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns PostgresqlDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.migration.v2.PostgresqlDialect; + + /** + * Decodes a PostgresqlDialect message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns PostgresqlDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.migration.v2.PostgresqlDialect; + + /** + * Verifies a PostgresqlDialect message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a PostgresqlDialect message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns PostgresqlDialect + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.migration.v2.PostgresqlDialect; + + /** + * Creates a plain object from a PostgresqlDialect message. Also converts values to other types if specified. + * @param message PostgresqlDialect + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.migration.v2.PostgresqlDialect, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this PostgresqlDialect to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a PrestoDialect. */ + interface IPrestoDialect { + } + + /** Represents a PrestoDialect. */ + class PrestoDialect implements IPrestoDialect { + + /** + * Constructs a new PrestoDialect. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.migration.v2.IPrestoDialect); + + /** + * Creates a new PrestoDialect instance using the specified properties. + * @param [properties] Properties to set + * @returns PrestoDialect instance + */ + public static create(properties?: google.cloud.bigquery.migration.v2.IPrestoDialect): google.cloud.bigquery.migration.v2.PrestoDialect; + + /** + * Encodes the specified PrestoDialect message. Does not implicitly {@link google.cloud.bigquery.migration.v2.PrestoDialect.verify|verify} messages. + * @param message PrestoDialect message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.migration.v2.IPrestoDialect, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified PrestoDialect message, length delimited. Does not implicitly {@link google.cloud.bigquery.migration.v2.PrestoDialect.verify|verify} messages. 
+ * @param message PrestoDialect message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.migration.v2.IPrestoDialect, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a PrestoDialect message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns PrestoDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.migration.v2.PrestoDialect; + + /** + * Decodes a PrestoDialect message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns PrestoDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.migration.v2.PrestoDialect; + + /** + * Verifies a PrestoDialect message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a PrestoDialect message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns PrestoDialect + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.migration.v2.PrestoDialect; + + /** + * Creates a plain object from a PrestoDialect message. Also converts values to other types if specified. + * @param message PrestoDialect + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.migration.v2.PrestoDialect, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this PrestoDialect to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + + /** Properties of a MySQLDialect. */ + interface IMySQLDialect { + } + + /** Represents a MySQLDialect. */ + class MySQLDialect implements IMySQLDialect { + + /** + * Constructs a new MySQLDialect. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.bigquery.migration.v2.IMySQLDialect); + + /** + * Creates a new MySQLDialect instance using the specified properties. + * @param [properties] Properties to set + * @returns MySQLDialect instance + */ + public static create(properties?: google.cloud.bigquery.migration.v2.IMySQLDialect): google.cloud.bigquery.migration.v2.MySQLDialect; + + /** + * Encodes the specified MySQLDialect message. Does not implicitly {@link google.cloud.bigquery.migration.v2.MySQLDialect.verify|verify} messages. + * @param message MySQLDialect message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.bigquery.migration.v2.IMySQLDialect, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified MySQLDialect message, length delimited. Does not implicitly {@link google.cloud.bigquery.migration.v2.MySQLDialect.verify|verify} messages. 
+ * @param message MySQLDialect message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.bigquery.migration.v2.IMySQLDialect, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a MySQLDialect message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns MySQLDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.bigquery.migration.v2.MySQLDialect; + + /** + * Decodes a MySQLDialect message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns MySQLDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.bigquery.migration.v2.MySQLDialect; + + /** + * Verifies a MySQLDialect message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a MySQLDialect message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns MySQLDialect + */ + public static fromObject(object: { [k: string]: any }): google.cloud.bigquery.migration.v2.MySQLDialect; + + /** + * Creates a plain object from a MySQLDialect message. Also converts values to other types if specified. + * @param message MySQLDialect + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.bigquery.migration.v2.MySQLDialect, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this MySQLDialect to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + /** Properties of an ObjectNameMappingList. */ interface IObjectNameMappingList { diff --git a/protos/protos.js b/protos/protos.js index b76c9df..cc33e05 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -3580,6 +3580,9 @@ * @property {google.cloud.bigquery.migration.v2.IAzureSynapseDialect|null} [azureSynapseDialect] Dialect azureSynapseDialect * @property {google.cloud.bigquery.migration.v2.IVerticaDialect|null} [verticaDialect] Dialect verticaDialect * @property {google.cloud.bigquery.migration.v2.ISQLServerDialect|null} [sqlServerDialect] Dialect sqlServerDialect + * @property {google.cloud.bigquery.migration.v2.IPostgresqlDialect|null} [postgresqlDialect] Dialect postgresqlDialect + * @property {google.cloud.bigquery.migration.v2.IPrestoDialect|null} [prestoDialect] Dialect prestoDialect + * @property {google.cloud.bigquery.migration.v2.IMySQLDialect|null} [mysqlDialect] Dialect mysqlDialect */ /** @@ -3685,17 +3688,41 @@ */ Dialect.prototype.sqlServerDialect = null; + /** + * Dialect postgresqlDialect. + * @member {google.cloud.bigquery.migration.v2.IPostgresqlDialect|null|undefined} postgresqlDialect + * @memberof google.cloud.bigquery.migration.v2.Dialect + * @instance + */ + Dialect.prototype.postgresqlDialect = null; + + /** + * Dialect prestoDialect. 
+ * @member {google.cloud.bigquery.migration.v2.IPrestoDialect|null|undefined} prestoDialect + * @memberof google.cloud.bigquery.migration.v2.Dialect + * @instance + */ + Dialect.prototype.prestoDialect = null; + + /** + * Dialect mysqlDialect. + * @member {google.cloud.bigquery.migration.v2.IMySQLDialect|null|undefined} mysqlDialect + * @memberof google.cloud.bigquery.migration.v2.Dialect + * @instance + */ + Dialect.prototype.mysqlDialect = null; + // OneOf field names bound to virtual getters and setters var $oneOfFields; /** * Dialect dialectValue. - * @member {"bigqueryDialect"|"hiveqlDialect"|"redshiftDialect"|"teradataDialect"|"oracleDialect"|"sparksqlDialect"|"snowflakeDialect"|"netezzaDialect"|"azureSynapseDialect"|"verticaDialect"|"sqlServerDialect"|undefined} dialectValue + * @member {"bigqueryDialect"|"hiveqlDialect"|"redshiftDialect"|"teradataDialect"|"oracleDialect"|"sparksqlDialect"|"snowflakeDialect"|"netezzaDialect"|"azureSynapseDialect"|"verticaDialect"|"sqlServerDialect"|"postgresqlDialect"|"prestoDialect"|"mysqlDialect"|undefined} dialectValue * @memberof google.cloud.bigquery.migration.v2.Dialect * @instance */ Object.defineProperty(Dialect.prototype, "dialectValue", { - get: $util.oneOfGetter($oneOfFields = ["bigqueryDialect", "hiveqlDialect", "redshiftDialect", "teradataDialect", "oracleDialect", "sparksqlDialect", "snowflakeDialect", "netezzaDialect", "azureSynapseDialect", "verticaDialect", "sqlServerDialect"]), + get: $util.oneOfGetter($oneOfFields = ["bigqueryDialect", "hiveqlDialect", "redshiftDialect", "teradataDialect", "oracleDialect", "sparksqlDialect", "snowflakeDialect", "netezzaDialect", "azureSynapseDialect", "verticaDialect", "sqlServerDialect", "postgresqlDialect", "prestoDialect", "mysqlDialect"]), set: $util.oneOfSetter($oneOfFields) }); @@ -3745,6 +3772,12 @@ $root.google.cloud.bigquery.migration.v2.VerticaDialect.encode(message.verticaDialect, writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); if (message.sqlServerDialect != null && Object.hasOwnProperty.call(message, "sqlServerDialect")) $root.google.cloud.bigquery.migration.v2.SQLServerDialect.encode(message.sqlServerDialect, writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); + if (message.postgresqlDialect != null && Object.hasOwnProperty.call(message, "postgresqlDialect")) + $root.google.cloud.bigquery.migration.v2.PostgresqlDialect.encode(message.postgresqlDialect, writer.uint32(/* id 12, wireType 2 =*/98).fork()).ldelim(); + if (message.prestoDialect != null && Object.hasOwnProperty.call(message, "prestoDialect")) + $root.google.cloud.bigquery.migration.v2.PrestoDialect.encode(message.prestoDialect, writer.uint32(/* id 13, wireType 2 =*/106).fork()).ldelim(); + if (message.mysqlDialect != null && Object.hasOwnProperty.call(message, "mysqlDialect")) + $root.google.cloud.bigquery.migration.v2.MySQLDialect.encode(message.mysqlDialect, writer.uint32(/* id 14, wireType 2 =*/114).fork()).ldelim(); return writer; }; @@ -3812,6 +3845,15 @@ case 11: message.sqlServerDialect = $root.google.cloud.bigquery.migration.v2.SQLServerDialect.decode(reader, reader.uint32()); break; + case 12: + message.postgresqlDialect = $root.google.cloud.bigquery.migration.v2.PostgresqlDialect.decode(reader, reader.uint32()); + break; + case 13: + message.prestoDialect = $root.google.cloud.bigquery.migration.v2.PrestoDialect.decode(reader, reader.uint32()); + break; + case 14: + message.mysqlDialect = $root.google.cloud.bigquery.migration.v2.MySQLDialect.decode(reader, reader.uint32()); + break; default: 
reader.skipType(tag & 7); break; @@ -3956,6 +3998,36 @@ return "sqlServerDialect." + error; } } + if (message.postgresqlDialect != null && message.hasOwnProperty("postgresqlDialect")) { + if (properties.dialectValue === 1) + return "dialectValue: multiple values"; + properties.dialectValue = 1; + { + var error = $root.google.cloud.bigquery.migration.v2.PostgresqlDialect.verify(message.postgresqlDialect); + if (error) + return "postgresqlDialect." + error; + } + } + if (message.prestoDialect != null && message.hasOwnProperty("prestoDialect")) { + if (properties.dialectValue === 1) + return "dialectValue: multiple values"; + properties.dialectValue = 1; + { + var error = $root.google.cloud.bigquery.migration.v2.PrestoDialect.verify(message.prestoDialect); + if (error) + return "prestoDialect." + error; + } + } + if (message.mysqlDialect != null && message.hasOwnProperty("mysqlDialect")) { + if (properties.dialectValue === 1) + return "dialectValue: multiple values"; + properties.dialectValue = 1; + { + var error = $root.google.cloud.bigquery.migration.v2.MySQLDialect.verify(message.mysqlDialect); + if (error) + return "mysqlDialect." + error; + } + } return null; }; @@ -4026,6 +4098,21 @@ throw TypeError(".google.cloud.bigquery.migration.v2.Dialect.sqlServerDialect: object expected"); message.sqlServerDialect = $root.google.cloud.bigquery.migration.v2.SQLServerDialect.fromObject(object.sqlServerDialect); } + if (object.postgresqlDialect != null) { + if (typeof object.postgresqlDialect !== "object") + throw TypeError(".google.cloud.bigquery.migration.v2.Dialect.postgresqlDialect: object expected"); + message.postgresqlDialect = $root.google.cloud.bigquery.migration.v2.PostgresqlDialect.fromObject(object.postgresqlDialect); + } + if (object.prestoDialect != null) { + if (typeof object.prestoDialect !== "object") + throw TypeError(".google.cloud.bigquery.migration.v2.Dialect.prestoDialect: object expected"); + message.prestoDialect = $root.google.cloud.bigquery.migration.v2.PrestoDialect.fromObject(object.prestoDialect); + } + if (object.mysqlDialect != null) { + if (typeof object.mysqlDialect !== "object") + throw TypeError(".google.cloud.bigquery.migration.v2.Dialect.mysqlDialect: object expected"); + message.mysqlDialect = $root.google.cloud.bigquery.migration.v2.MySQLDialect.fromObject(object.mysqlDialect); + } return message; }; @@ -4097,6 +4184,21 @@ if (options.oneofs) object.dialectValue = "sqlServerDialect"; } + if (message.postgresqlDialect != null && message.hasOwnProperty("postgresqlDialect")) { + object.postgresqlDialect = $root.google.cloud.bigquery.migration.v2.PostgresqlDialect.toObject(message.postgresqlDialect, options); + if (options.oneofs) + object.dialectValue = "postgresqlDialect"; + } + if (message.prestoDialect != null && message.hasOwnProperty("prestoDialect")) { + object.prestoDialect = $root.google.cloud.bigquery.migration.v2.PrestoDialect.toObject(message.prestoDialect, options); + if (options.oneofs) + object.dialectValue = "prestoDialect"; + } + if (message.mysqlDialect != null && message.hasOwnProperty("mysqlDialect")) { + object.mysqlDialect = $root.google.cloud.bigquery.migration.v2.MySQLDialect.toObject(message.mysqlDialect, options); + if (options.oneofs) + object.dialectValue = "mysqlDialect"; + } return object; }; @@ -5935,6 +6037,486 @@ return SQLServerDialect; })(); + v2.PostgresqlDialect = (function() { + + /** + * Properties of a PostgresqlDialect. 
+ * @memberof google.cloud.bigquery.migration.v2 + * @interface IPostgresqlDialect + */ + + /** + * Constructs a new PostgresqlDialect. + * @memberof google.cloud.bigquery.migration.v2 + * @classdesc Represents a PostgresqlDialect. + * @implements IPostgresqlDialect + * @constructor + * @param {google.cloud.bigquery.migration.v2.IPostgresqlDialect=} [properties] Properties to set + */ + function PostgresqlDialect(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Creates a new PostgresqlDialect instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @static + * @param {google.cloud.bigquery.migration.v2.IPostgresqlDialect=} [properties] Properties to set + * @returns {google.cloud.bigquery.migration.v2.PostgresqlDialect} PostgresqlDialect instance + */ + PostgresqlDialect.create = function create(properties) { + return new PostgresqlDialect(properties); + }; + + /** + * Encodes the specified PostgresqlDialect message. Does not implicitly {@link google.cloud.bigquery.migration.v2.PostgresqlDialect.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @static + * @param {google.cloud.bigquery.migration.v2.IPostgresqlDialect} message PostgresqlDialect message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PostgresqlDialect.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + return writer; + }; + + /** + * Encodes the specified PostgresqlDialect message, length delimited. Does not implicitly {@link google.cloud.bigquery.migration.v2.PostgresqlDialect.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @static + * @param {google.cloud.bigquery.migration.v2.IPostgresqlDialect} message PostgresqlDialect message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PostgresqlDialect.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a PostgresqlDialect message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.migration.v2.PostgresqlDialect} PostgresqlDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PostgresqlDialect.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.migration.v2.PostgresqlDialect(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a PostgresqlDialect message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.migration.v2.PostgresqlDialect} PostgresqlDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PostgresqlDialect.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a PostgresqlDialect message. + * @function verify + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + PostgresqlDialect.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + return null; + }; + + /** + * Creates a PostgresqlDialect message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.migration.v2.PostgresqlDialect} PostgresqlDialect + */ + PostgresqlDialect.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.migration.v2.PostgresqlDialect) + return object; + return new $root.google.cloud.bigquery.migration.v2.PostgresqlDialect(); + }; + + /** + * Creates a plain object from a PostgresqlDialect message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @static + * @param {google.cloud.bigquery.migration.v2.PostgresqlDialect} message PostgresqlDialect + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + PostgresqlDialect.toObject = function toObject() { + return {}; + }; + + /** + * Converts this PostgresqlDialect to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @instance + * @returns {Object.} JSON object + */ + PostgresqlDialect.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return PostgresqlDialect; + })(); + + v2.PrestoDialect = (function() { + + /** + * Properties of a PrestoDialect. + * @memberof google.cloud.bigquery.migration.v2 + * @interface IPrestoDialect + */ + + /** + * Constructs a new PrestoDialect. + * @memberof google.cloud.bigquery.migration.v2 + * @classdesc Represents a PrestoDialect. + * @implements IPrestoDialect + * @constructor + * @param {google.cloud.bigquery.migration.v2.IPrestoDialect=} [properties] Properties to set + */ + function PrestoDialect(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Creates a new PrestoDialect instance using the specified properties. 
+ * @function create + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @static + * @param {google.cloud.bigquery.migration.v2.IPrestoDialect=} [properties] Properties to set + * @returns {google.cloud.bigquery.migration.v2.PrestoDialect} PrestoDialect instance + */ + PrestoDialect.create = function create(properties) { + return new PrestoDialect(properties); + }; + + /** + * Encodes the specified PrestoDialect message. Does not implicitly {@link google.cloud.bigquery.migration.v2.PrestoDialect.verify|verify} messages. + * @function encode + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @static + * @param {google.cloud.bigquery.migration.v2.IPrestoDialect} message PrestoDialect message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PrestoDialect.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + return writer; + }; + + /** + * Encodes the specified PrestoDialect message, length delimited. Does not implicitly {@link google.cloud.bigquery.migration.v2.PrestoDialect.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @static + * @param {google.cloud.bigquery.migration.v2.IPrestoDialect} message PrestoDialect message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + PrestoDialect.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a PrestoDialect message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.migration.v2.PrestoDialect} PrestoDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PrestoDialect.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.migration.v2.PrestoDialect(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a PrestoDialect message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.migration.v2.PrestoDialect} PrestoDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + PrestoDialect.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a PrestoDialect message. 
+ * @function verify + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + PrestoDialect.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + return null; + }; + + /** + * Creates a PrestoDialect message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.migration.v2.PrestoDialect} PrestoDialect + */ + PrestoDialect.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.migration.v2.PrestoDialect) + return object; + return new $root.google.cloud.bigquery.migration.v2.PrestoDialect(); + }; + + /** + * Creates a plain object from a PrestoDialect message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @static + * @param {google.cloud.bigquery.migration.v2.PrestoDialect} message PrestoDialect + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + PrestoDialect.toObject = function toObject() { + return {}; + }; + + /** + * Converts this PrestoDialect to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @instance + * @returns {Object.} JSON object + */ + PrestoDialect.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return PrestoDialect; + })(); + + v2.MySQLDialect = (function() { + + /** + * Properties of a MySQLDialect. + * @memberof google.cloud.bigquery.migration.v2 + * @interface IMySQLDialect + */ + + /** + * Constructs a new MySQLDialect. + * @memberof google.cloud.bigquery.migration.v2 + * @classdesc Represents a MySQLDialect. + * @implements IMySQLDialect + * @constructor + * @param {google.cloud.bigquery.migration.v2.IMySQLDialect=} [properties] Properties to set + */ + function MySQLDialect(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * Creates a new MySQLDialect instance using the specified properties. + * @function create + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @static + * @param {google.cloud.bigquery.migration.v2.IMySQLDialect=} [properties] Properties to set + * @returns {google.cloud.bigquery.migration.v2.MySQLDialect} MySQLDialect instance + */ + MySQLDialect.create = function create(properties) { + return new MySQLDialect(properties); + }; + + /** + * Encodes the specified MySQLDialect message. Does not implicitly {@link google.cloud.bigquery.migration.v2.MySQLDialect.verify|verify} messages. 
+ * @function encode + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @static + * @param {google.cloud.bigquery.migration.v2.IMySQLDialect} message MySQLDialect message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MySQLDialect.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + return writer; + }; + + /** + * Encodes the specified MySQLDialect message, length delimited. Does not implicitly {@link google.cloud.bigquery.migration.v2.MySQLDialect.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @static + * @param {google.cloud.bigquery.migration.v2.IMySQLDialect} message MySQLDialect message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + MySQLDialect.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a MySQLDialect message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.bigquery.migration.v2.MySQLDialect} MySQLDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MySQLDialect.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.bigquery.migration.v2.MySQLDialect(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a MySQLDialect message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.bigquery.migration.v2.MySQLDialect} MySQLDialect + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + MySQLDialect.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a MySQLDialect message. + * @function verify + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + MySQLDialect.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + return null; + }; + + /** + * Creates a MySQLDialect message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.bigquery.migration.v2.MySQLDialect} MySQLDialect + */ + MySQLDialect.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.bigquery.migration.v2.MySQLDialect) + return object; + return new $root.google.cloud.bigquery.migration.v2.MySQLDialect(); + }; + + /** + * Creates a plain object from a MySQLDialect message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @static + * @param {google.cloud.bigquery.migration.v2.MySQLDialect} message MySQLDialect + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + MySQLDialect.toObject = function toObject() { + return {}; + }; + + /** + * Converts this MySQLDialect to JSON. + * @function toJSON + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @instance + * @returns {Object.} JSON object + */ + MySQLDialect.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return MySQLDialect; + })(); + v2.ObjectNameMappingList = (function() { /** diff --git a/protos/protos.json b/protos/protos.json index 61364ce..e998914 100644 --- a/protos/protos.json +++ b/protos/protos.json @@ -422,7 +422,10 @@ "netezzaDialect", "azureSynapseDialect", "verticaDialect", - "sqlServerDialect" + "sqlServerDialect", + "postgresqlDialect", + "prestoDialect", + "mysqlDialect" ] } }, @@ -470,6 +473,18 @@ "sqlServerDialect": { "type": "SQLServerDialect", "id": 11 + }, + "postgresqlDialect": { + "type": "PostgresqlDialect", + "id": 12 + }, + "prestoDialect": { + "type": "PrestoDialect", + "id": 13 + }, + "mysqlDialect": { + "type": "MySQLDialect", + "id": 14 } } }, @@ -520,6 +535,15 @@ "SQLServerDialect": { "fields": {} }, + "PostgresqlDialect": { + "fields": {} + }, + "PrestoDialect": { + "fields": {} + }, + "MySQLDialect": { + "fields": {} + }, "ObjectNameMappingList": { "fields": { "nameMap": { diff --git a/system-test/fixtures/sample/src/index.js b/system-test/fixtures/sample/src/index.js index 66ee2fb..164dcc0 100644 --- a/system-test/fixtures/sample/src/index.js +++ b/system-test/fixtures/sample/src/index.js @@ -17,7 +17,7 @@ // ** All changes to this file may be overwritten. ** /* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/bigquery-migration'); +const migration = require('@google-cloud/migration'); function main() { const migrationServiceClient = new migration.MigrationServiceClient(); diff --git a/system-test/fixtures/sample/src/index.ts b/system-test/fixtures/sample/src/index.ts index 80fbe2d..0afe940 100644 --- a/system-test/fixtures/sample/src/index.ts +++ b/system-test/fixtures/sample/src/index.ts @@ -16,7 +16,7 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. 
** -import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; +import {MigrationServiceClient} from '@google-cloud/migration'; // check that the client class type name can be used function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { From 4e2979307f94e83d5a53911d09ffeae0af958f79 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 19 Aug 2022 20:08:49 +0000 Subject: [PATCH 03/13] chore: remove unused proto imports PiperOrigin-RevId: 468735472 Source-Link: https://github.com/googleapis/googleapis/commit/cfa1b3782da7ccae31673d45401a0b79d2d4a84b Source-Link: https://github.com/googleapis/googleapis-gen/commit/09b7666656510f5b00b893f003a0ba5766f9e250 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDliNzY2NjY1NjUxMGY1YjAwYjg5M2YwMDNhMGJhNTc2NmY5ZTI1MCJ9 --- owl-bot-staging/v2/.eslintignore | 7 + owl-bot-staging/v2/.eslintrc.json | 3 + owl-bot-staging/v2/.gitignore | 14 + owl-bot-staging/v2/.jsdoc.js | 55 + owl-bot-staging/v2/.mocharc.js | 33 + owl-bot-staging/v2/.prettierrc.js | 22 + owl-bot-staging/v2/README.md | 1 + owl-bot-staging/v2/linkinator.config.json | 16 + owl-bot-staging/v2/package.json | 64 + .../migration/v2/migration_entities.proto | 233 +++ .../v2/migration_error_details.proto | 62 + .../migration/v2/migration_metrics.proto | 111 ++ .../migration/v2/migration_service.proto | 245 ++++ .../migration/v2/translation_config.proto | 257 ++++ ...ation_service.create_migration_workflow.js | 64 + ...ation_service.delete_migration_workflow.js | 59 + ...migration_service.get_migration_subtask.js | 63 + ...igration_service.get_migration_workflow.js | 63 + ...gration_service.list_migration_subtasks.js | 83 ++ ...ration_service.list_migration_workflows.js | 77 + ...ration_service.start_migration_workflow.js | 59 + ...ta.google.cloud.bigquery.migration.v2.json | 335 +++++ owl-bot-staging/v2/src/index.ts | 25 + owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 ++ owl-bot-staging/v2/src/v2/index.ts | 19 + .../v2/src/v2/migration_service_client.ts | 1246 ++++++++++++++++ .../v2/migration_service_client_config.json | 71 + .../src/v2/migration_service_proto_list.json | 7 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + owl-bot-staging/v2/system-test/install.ts | 49 + .../v2/test/gapic_migration_service_v2.ts | 1254 +++++++++++++++++ owl-bot-staging/v2/tsconfig.json | 19 + owl-bot-staging/v2/webpack.config.js | 64 + owl-bot-staging/v2alpha/.eslintignore | 7 + owl-bot-staging/v2alpha/.eslintrc.json | 3 + owl-bot-staging/v2alpha/.gitignore | 14 + owl-bot-staging/v2alpha/.jsdoc.js | 55 + owl-bot-staging/v2alpha/.mocharc.js | 33 + owl-bot-staging/v2alpha/.prettierrc.js | 22 + owl-bot-staging/v2alpha/README.md | 1 + .../v2alpha/linkinator.config.json | 16 + owl-bot-staging/v2alpha/package.json | 64 + .../migration/v2alpha/assessment_task.proto | 49 + .../v2alpha/migration_entities.proto | 244 ++++ .../v2alpha/migration_error_details.proto | 62 + .../migration/v2alpha/migration_metrics.proto | 111 ++ .../migration/v2alpha/migration_service.proto | 247 ++++ .../migration/v2alpha/translation_task.proto | 207 +++ ...ation_service.create_migration_workflow.js | 64 + ...ation_service.delete_migration_workflow.js | 59 + ...migration_service.get_migration_subtask.js | 63 + ...igration_service.get_migration_workflow.js | 63 + ...gration_service.list_migration_subtasks.js | 83 ++ ...ration_service.list_migration_workflows.js | 77 + ...ration_service.start_migration_workflow.js | 59 + 
...ogle.cloud.bigquery.migration.v2alpha.json | 335 +++++ owl-bot-staging/v2alpha/src/index.ts | 25 + .../v2alpha/src/v2alpha/gapic_metadata.json | 101 ++ owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 + .../src/v2alpha/migration_service_client.ts | 1246 ++++++++++++++++ .../migration_service_client_config.json | 73 + .../v2alpha/migration_service_proto_list.json | 8 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + .../v2alpha/system-test/install.ts | 49 + .../test/gapic_migration_service_v2alpha.ts | 1254 +++++++++++++++++ owl-bot-staging/v2alpha/tsconfig.json | 19 + owl-bot-staging/v2alpha/webpack.config.js | 64 + 69 files changed, 9695 insertions(+) create mode 100644 owl-bot-staging/v2/.eslintignore create mode 100644 owl-bot-staging/v2/.eslintrc.json create mode 100644 owl-bot-staging/v2/.gitignore create mode 100644 owl-bot-staging/v2/.jsdoc.js create mode 100644 owl-bot-staging/v2/.mocharc.js create mode 100644 owl-bot-staging/v2/.prettierrc.js create mode 100644 owl-bot-staging/v2/README.md create mode 100644 owl-bot-staging/v2/linkinator.config.json create mode 100644 owl-bot-staging/v2/package.json create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json create mode 100644 owl-bot-staging/v2/src/index.ts create mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/src/v2/index.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json create mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2/system-test/install.ts create mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts create mode 100644 owl-bot-staging/v2/tsconfig.json create mode 100644 owl-bot-staging/v2/webpack.config.js create mode 100644 owl-bot-staging/v2alpha/.eslintignore create mode 100644 owl-bot-staging/v2alpha/.eslintrc.json create mode 100644 owl-bot-staging/v2alpha/.gitignore create mode 100644 
owl-bot-staging/v2alpha/.jsdoc.js create mode 100644 owl-bot-staging/v2alpha/.mocharc.js create mode 100644 owl-bot-staging/v2alpha/.prettierrc.js create mode 100644 owl-bot-staging/v2alpha/README.md create mode 100644 owl-bot-staging/v2alpha/linkinator.config.json create mode 100644 owl-bot-staging/v2alpha/package.json create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json create mode 100644 owl-bot-staging/v2alpha/src/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2alpha/system-test/install.ts create mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts create mode 100644 owl-bot-staging/v2alpha/tsconfig.json create mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2/.gitignore @@ -0,0 +1,14 @@ +**/*.log 
+**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js new file mode 100644 index 0000000..aabe555 --- /dev/null +++ b/owl-bot-staging/v2/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json new file mode 100644 index 0000000..6b38f7b --- /dev/null +++ b/owl-bot-staging/v2/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . 
&& cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.1.1" + }, + "devDependencies": { + "@types/mocha": "^9.1.0", + "@types/node": "^16.0.0", + "@types/sinon": "^10.0.8", + "c8": "^7.11.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.7", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^3.0.0", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^9.2.6", + "typescript": "^4.5.5", + "webpack": "^5.67.0", + "webpack-cli": "^4.9.1" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto new file mode 100644 index 0000000..7d77bae --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto @@ -0,0 +1,233 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2/translation_config.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. 
forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. + // The ID is server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map<string, MigrationTask> tasks = 2; + + // Output only. The status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // The task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. + FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Batch SQL Translation. + TranslationConfigDetails translation_config_details = 14; + } + + // Output only. Immutable. The unique identifier for the migration task. The + // ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be one of the supported task types: + // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, + // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, + // Translation_Snowflake2BQ, Translation_Netezza2BQ, + // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, + // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. + string type = 2; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask.
+ enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned, it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID + // is server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 6 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Provides details of errors and issues encountered while + // processing the subtask. Presence of error details does not mean that the + // subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number of resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto new file mode 100644 index 0000000..199e2db --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that were encountered when +// processing a subtask. +message ErrorDetail { + // Optional. The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no column information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto new file mode 100644 index 0000000..e52fead --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. 
+ oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately `+/-10^(+/-300)` and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto new file mode 100644 index 0000000..3c1a89e --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto @@ -0,0 +1,245 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_entities.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflows. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name.
+ rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. + rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. 
+ // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtask. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. + // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // task in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages.
+ string next_page_token = 2; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto new file mode 100644 index 0000000..994140d --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto @@ -0,0 +1,257 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationConfigProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The translation config to capture necessary settings for a translation task +// and subtask. +message TranslationConfigDetails { + // The chosen path where the source for input files will be found. + oneof source_location { + // The Cloud Storage path for a directory of files to translate in a task. + string gcs_source_path = 1; + } + + // The chosen path where the destination for output files will be found. + oneof target_location { + // The Cloud Storage path to write back the corresponding input files to. + string gcs_target_path = 2; + } + + // The dialect of the input files. + Dialect source_dialect = 3; + + // The target dialect for the engine to translate the input to. + Dialect target_dialect = 4; + + // The mapping of full SQL object names from their current state to the + // desired output. + oneof output_name_mapping { + // The mapping of objects to their desired output names in list form. + ObjectNameMappingList name_mapping_list = 5; + } + + // The default source environment values for the translation. + SourceEnv source_env = 6; +} + +// The possible dialect options for translation. +message Dialect { + // The possible dialect options that this message represents. 
+ oneof dialect_value { + // The BigQuery dialect + BigQueryDialect bigquery_dialect = 1; + + // The HiveQL dialect + HiveQLDialect hiveql_dialect = 2; + + // The Redshift dialect + RedshiftDialect redshift_dialect = 3; + + // The Teradata dialect + TeradataDialect teradata_dialect = 4; + + // The Oracle dialect + OracleDialect oracle_dialect = 5; + + // The SparkSQL dialect + SparkSQLDialect sparksql_dialect = 6; + + // The Snowflake dialect + SnowflakeDialect snowflake_dialect = 7; + + // The Netezza dialect + NetezzaDialect netezza_dialect = 8; + + // The Azure Synapse dialect + AzureSynapseDialect azure_synapse_dialect = 9; + + // The Vertica dialect + VerticaDialect vertica_dialect = 10; + + // The SQL Server dialect + SQLServerDialect sql_server_dialect = 11; + + // The Postgresql dialect + PostgresqlDialect postgresql_dialect = 12; + + // The Presto dialect + PrestoDialect presto_dialect = 13; + + // The MySQL dialect + MySQLDialect mysql_dialect = 14; + } +} + +// The dialect definition for BigQuery. +message BigQueryDialect {} + +// The dialect definition for HiveQL. +message HiveQLDialect {} + +// The dialect definition for Redshift. +message RedshiftDialect {} + +// The dialect definition for Teradata. +message TeradataDialect { + // The sub-dialect options for Teradata. + enum Mode { + // Unspecified mode. + MODE_UNSPECIFIED = 0; + + // Teradata SQL mode. + SQL = 1; + + // BTEQ mode (which includes SQL). + BTEQ = 2; + } + + // Which Teradata sub-dialect mode the user specifies. + Mode mode = 1; +} + +// The dialect definition for Oracle. +message OracleDialect {} + +// The dialect definition for SparkSQL. +message SparkSQLDialect {} + +// The dialect definition for Snowflake. +message SnowflakeDialect {} + +// The dialect definition for Netezza. +message NetezzaDialect {} + +// The dialect definition for Azure Synapse. +message AzureSynapseDialect {} + +// The dialect definition for Vertica. +message VerticaDialect {} + +// The dialect definition for SQL Server. +message SQLServerDialect {} + +// The dialect definition for Postgresql. +message PostgresqlDialect {} + +// The dialect definition for Presto. +message PrestoDialect {} + +// The dialect definition for MySQL. +message MySQLDialect {} + +// Represents a map of name mappings using a list of key:value proto messages of +// existing name to desired output name. +message ObjectNameMappingList { + // The elements of the object name map. + repeated ObjectNameMapping name_map = 1; +} + +// Represents a key-value pair of NameMappingKey to NameMappingValue to +// represent the mapping of SQL names from the input value to desired output. +message ObjectNameMapping { + // The name of the object in source that is being mapped. + NameMappingKey source = 1; + + // The desired target name of the object that is being mapped. + NameMappingValue target = 2; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the source data warehouse. +message NameMappingKey { + // The type of the object that is being mapped. + enum Type { + // Unspecified name mapping type. + TYPE_UNSPECIFIED = 0; + + // The object being mapped is a database. + DATABASE = 1; + + // The object being mapped is a schema. + SCHEMA = 2; + + // The object being mapped is a relation. + RELATION = 3; + + // The object being mapped is an attribute. + ATTRIBUTE = 4; + + // The object being mapped is a relation alias. + RELATION_ALIAS = 5; + + // The object being mapped is a an attribute alias. 
+ ATTRIBUTE_ALIAS = 6; + + // The object being mapped is a function. + FUNCTION = 7; + } + + // The type of object that is being mapped. + Type type = 1; + + // The database name (BigQuery project ID equivalent in the source data + // warehouse). + string database = 2; + + // The schema name (BigQuery dataset equivalent in the source data warehouse). + string schema = 3; + + // The relation name (BigQuery table or view equivalent in the source data + // warehouse). + string relation = 4; + + // The attribute name (BigQuery column equivalent in the source data + // warehouse). + string attribute = 5; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the target data warehouse. +message NameMappingValue { + // The database name (BigQuery project ID equivalent in the target data + // warehouse). + string database = 1; + + // The schema name (BigQuery dataset equivalent in the target data warehouse). + string schema = 2; + + // The relation name (BigQuery table or view equivalent in the target data + // warehouse). + string relation = 3; + + // The attribute name (BigQuery column equivalent in the target data + // warehouse). + string attribute = 4; +} + +// Represents the default source environment values for the translation. +message SourceEnv { + // The default database name to fully qualify SQL objects when their database + // name is missing. + string default_database = 1; + + // The schema search path. When SQL objects are missing schema name, + // translation engine will search through this list to find the value. + repeated string schema_search_path = 2; +} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..9aed895 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js @@ -0,0 +1,64 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. 
+ */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..aeda293 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js @@ -0,0 +1,59 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..df6f7d2 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js @@ -0,0 +1,63 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..72f7565 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js @@ -0,0 +1,63 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..95e5833 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js @@ -0,0 +1,83 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..63402d4 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js @@ -0,0 +1,77 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..76c614c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js @@ -0,0 +1,59 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json new file mode 100644 index 0000000..c5cd62c --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2", + "version": "v2" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 56, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 69, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 75, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "service": { + "shortName": "MigrationService", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts new file mode 100644 index 0000000..35a8fd9 --- /dev/null +++ b/owl-bot-staging/v2/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2 from './v2'; +const MigrationServiceClient = v2.MigrationServiceClient; +type MigrationServiceClient = v2.MigrationServiceClient; +export {v2, MigrationServiceClient}; +export default {v2, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json new file mode 100644 index 0000000..e0aa12f --- /dev/null +++ b/owl-bot-staging/v2/src/v2/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2", + "libraryPackage": "@google-cloud/migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/index.ts b/owl-bot-staging/v2/src/v2/index.ts new 
file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts new file mode 100644 index 0000000..20f6e71 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client.ts @@ -0,0 +1,1246 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; + +import { Transform } from 'stream'; +import { RequestType } from 'google-gax/build/src/apitypes'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
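Before the client class itself, a hedged sketch of how a consumer might reach it through the exports wired up in src/index.ts above; the package name follows the staging samples (`@google-cloud/migration`) and the constructor options shown are hypothetical values for options documented below.

// Equivalent ways to obtain the v2 client from the package exports (sketch).
const migration = require('@google-cloud/migration');
const {MigrationServiceClient} = migration;                 // top-level re-export
const SameClient = migration.v2.MigrationServiceClient;     // versioned namespace

const client = new MigrationServiceClient({
  projectId: 'my-project',                          // hypothetical project ID
  apiEndpoint: 'bigquerymigration.googleapis.com',  // default endpoint, shown explicitly
});
console.log(SameClient === MigrationServiceClient); // true: same class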
+ * @class + * @memberof v2 + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. 
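For callers that prefer to do this setup eagerly rather than on the first RPC, a minimal sketch (package name as in the samples above):

const {MigrationServiceClient} = require('@google-cloud/migration').v2;

async function warmUpClient() {
  const migrationClient = new MigrationServiceClient();
  // initialize() is idempotent: it creates (or returns) the service stub,
  // so later RPCs skip the authentication and stub-creation cost.
  await migrationClient.initialize();
  return migrationClient;
}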
+ * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise<string>; + getProjectId(callback: Callback<string, undefined, undefined>): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback<string, undefined, undefined>): + Promise<string>|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required.
The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
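A hedged usage sketch for the optional readMask parameter documented here; the subtask resource name is hypothetical and the mask paths are assumed MigrationSubtask fields.

const {MigrationServiceClient} = require('@google-cloud/migration').v2;

async function getSubtaskSummary() {
  const migrationClient = new MigrationServiceClient();
  const [subtask] = await migrationClient.getMigrationSubtask({
    // Hypothetical subtask resource name, following the pattern above.
    name: 'projects/123/locations/us/workflows/1234/subtasks/543',
    // Optional FieldMask: only the listed fields are populated in the response.
    readMask: {paths: ['name', 'state', 'processing_error']},
  });
  console.log(subtask.name, subtask.state);
}

getSubtaskSummary().catch(console.error);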
+ * @example include:samples/generated/v2/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
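To make the streaming variant above concrete, a minimal sketch that consumes one 'data' event per MigrationWorkflow (package name and parent value as in the samples above):

const {MigrationServiceClient} = require('@google-cloud/migration').v2;

function streamWorkflows(parent) {
  const migrationClient = new MigrationServiceClient();
  // The stream pages through results internally; no page tokens to manage.
  migrationClient.listMigrationWorkflowsStream({parent})
    .on('data', workflow => console.log(workflow.name))
    .on('error', console.error)
    .on('end', () => console.log('All workflows listed.'));
}

streamWorkflows('projects/123/locations/us');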
+ * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples.
+ */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token.
+ * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. 
The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. + */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. 
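+ *
+ * Illustrative round trip (hand-written, not generated), assuming the
+ * `projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}`
+ * template used by `migrationSubtaskPath`; all segment values are placeholders:
+ *
+ *   const name = client.migrationSubtaskPath('my-project', 'us', 'my-workflow', 'my-subtask');
+ *   const location = client.matchLocationFromMigrationSubtaskName(name); // 'us'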
+ */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
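+ *
+ * Illustrative shutdown sequence (hand-written, not generated):
+ *
+ *   const client = new MigrationServiceClient();
+ *   // ... issue RPCs and await their results ...
+ *   await client.close(); // the client must not be used after this resolves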
+ */ + close(): Promise { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json new file mode 100644 index 0000000..5832815 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client_config.json @@ -0,0 +1,71 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListMigrationSubtasks": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json new file mode 100644 index 0000000..57df7ab --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json @@ -0,0 +1,7 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" +] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..aa2c893 --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..0afe940 --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts new file mode 100644 index 0000000..8ec4522 --- /dev/null +++ b/owl-bot-staging/v2/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import { packNTest } from 'pack-n-play'; +import { readFileSync } from 'fs'; +import { describe, it } from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts new file mode 100644 index 0000000..eb05cd6 --- /dev/null +++ b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts @@ -0,0 +1,1254 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import { describe, it } from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2.MigrationServiceClient', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + 
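+ // stubSimpleCall(undefined, expectedError) swaps in a sinon stub that rejects,
+ // so the client method is expected to surface the same error below.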
await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = 
''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + 
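+ // Also verify that the inner API call received the request together with the
+ // routing-header options (the callback itself was supplied above).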
assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + 
client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + 
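+ // Beyond the emitted data, check that the page-stream descriptor was wired up
+ // with the inner API call, the original request, and call settings that carry
+ // the expected routing header.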
assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', 
private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + 
}); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); + 
const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + 
expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + 
.getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
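For context on the surface these unit tests stub out, here is a minimal consumer-side sketch of the v2 paging API they exercise (listMigrationWorkflowsAsync and listMigrationWorkflowsStream). It assumes the staging package name from package.json, Application Default Credentials, and a placeholder project and location; it is not part of the generated output.

// Sketch only: consuming the v2 paging surface exercised by the tests above.
// Assumes Application Default Credentials; 'my-project' and 'us' are placeholders.
import {v2} from '@google-cloud/migration';

async function listWorkflows(): Promise<void> {
  const client = new v2.MigrationServiceClient();
  const parent = client.locationPath('my-project', 'us');

  // Auto-paginating async iterator (backed by descriptors.page.*.asyncIterate).
  for await (const workflow of client.listMigrationWorkflowsAsync({parent})) {
    console.log(workflow.name, workflow.state);
  }

  // Equivalent object-mode stream (backed by descriptors.page.*.createStream).
  client
    .listMigrationWorkflowsStream({parent})
    .on('data', workflow => console.log(workflow.displayName))
    .on('error', console.error)
    .on('end', () => console.log('done'));
}

listWorkflows().catch(console.error);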
+ +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2alpha/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js new file mode 100644 index 0000000..aabe555 --- /dev/null +++ b/owl-bot-staging/v2alpha/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2alpha/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2alpha/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2alpha/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2alpha/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json new file mode 100644 index 0000000..6b38f7b --- /dev/null +++ b/owl-bot-staging/v2alpha/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.1.1" + }, + "devDependencies": { + "@types/mocha": "^9.1.0", + "@types/node": "^16.0.0", + "@types/sinon": "^10.0.8", + "c8": "^7.11.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.7", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^3.0.0", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^9.2.6", + "typescript": "^4.5.5", + "webpack": "^5.67.0", + "webpack-cli": "^4.9.1" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto new file mode 100644 index 0000000..0c6ea13 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto @@ -0,0 +1,49 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "AssessmentTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Assessment task config. +message AssessmentTaskDetails { + // Required. The Cloud Storage path for assessment input files. + string input_path = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The BigQuery dataset for output. + string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. An optional Cloud Storage path to write the query logs (which is + // then used as an input path on the translation task) + string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) + // from which the input data is extracted. + string data_source = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// Details for an assessment task orchestration result. +message AssessmentOrchestrationResultDetails { + // Optional. The version used for the output table schemas. + string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto new file mode 100644 index 0000000..50d4c75 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto @@ -0,0 +1,244 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
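To make the assessment config above concrete, here is a hedged sketch of how AssessmentTaskDetails might be supplied when creating a v2alpha workflow. The camelCase field names follow the usual proto-to-JavaScript mapping; the task map key, the type string, and all paths and dataset names are illustrative placeholders rather than values defined in this patch.

// Sketch only: a v2alpha workflow carrying a single assessment task.
// Field names are camelCase forms of the proto fields above; the task key,
// the `type` string, the bucket, and the dataset are placeholders.
import {v2alpha} from '@google-cloud/migration';

async function createAssessmentWorkflow(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  const [workflow] = await client.createMigrationWorkflow({
    parent: client.locationPath('my-project', 'us'),
    migrationWorkflow: {
      displayName: 'teradata-assessment',
      tasks: {
        assessment: {                 // key in the workflow's named task map
          type: 'Assessment',         // placeholder task type string
          assessmentTaskDetails: {
            inputPath: 'gs://my-bucket/assessment-input/*',
            outputDataset: 'assessment_results',
            dataSource: 'TERADATA',   // per the comment in assessment_task.proto
          },
        },
      },
    },
  });
  console.log('Created workflow:', workflow.name);
}

createAssessmentWorkflow().catch(console.error);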
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map tasks = 2; + + // Output only. That status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // Tha task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. 
+ FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Assessment. + AssessmentTaskDetails assessment_task_details = 12; + + // Task configuration for Batch/Offline SQL Translation. + TranslationTaskDetails translation_task_details = 13; + } + + // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be a supported task type. + string type = 2; + + // DEPRECATED! Use one of the task_details below. + // The details of the task. The type URL must be one of the supported task + // details messages and correspond to the Task's type. + google.protobuf.Any details = 3; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; + + // Output only. Additional information about the orchestration. + MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. 
Provides details to errors and issues encountered while processing the + // subtask. Presence of error details does not mean that the subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number or resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} + +// Additional information from the orchestrator when it is done with the +// task orchestration. +message MigrationTaskOrchestrationResult { + // Details specific to the task type. + oneof details { + // Details specific to assessment task types. + AssessmentOrchestrationResultDetails assessment_details = 1; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto new file mode 100644 index 0000000..89dac5e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that where encountered when +// processing a subtask. +message ErrorDetail { + // Optional. 
The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto new file mode 100644 index 0000000..ce60dd2 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. 
If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. + oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately +/-9.2x10^18. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately +/-10^(+/-300) and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto new file mode 100644 index 0000000..9a184a1 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto @@ -0,0 +1,247 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
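The metric messages above are surfaced to clients on MigrationSubtask.metrics. The following hedged sketch reads them back through the v2alpha getMigrationSubtask call; the project, location, workflow, and subtask IDs are placeholders built with the generated path helper, and the camelCase field names follow the usual proto mapping.

// Sketch only: inspecting the TimeSeries metrics attached to a v2alpha subtask.
// The project/location/workflow/subtask IDs are placeholders.
import {v2alpha} from '@google-cloud/migration';

async function printSubtaskMetrics(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  const name = client.migrationSubtaskPath('my-project', 'us', '1234', '543');

  const [subtask] = await client.getMigrationSubtask({name});
  for (const series of subtask.metrics ?? []) {
    console.log(`${series.metric} (${series.valueType})`);
    for (const point of series.points ?? []) {
      // TypedValue is a oneof: exactly one value field is set on each point.
      const value = point.value;
      console.log(
        '  end time:', point.interval?.endTime,
        'value:', value?.int64Value ?? value?.doubleValue ?? value?.boolValue ?? value?.stringValue
      );
    }
  }
}

printSubtaskMetrics().catch(console.error);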
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2alpha/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. + rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. 
+ rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. 
+ // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto new file mode 100644 index 0000000..bf4b27e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto @@ -0,0 +1,207 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
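As a usage sketch for the request message above: alongside listMigrationSubtasks, the generated v2alpha client exposes the auto-paginating listMigrationSubtasksAsync variant used below. The parent workflow and the task ID are placeholders; the filter string follows the `migration_task = "ab012"` form given in the proto comment.

// Sketch only: listing the subtasks of one task inside a v2alpha workflow.
// The parent workflow and the task ID in the filter are placeholders.
import {v2alpha} from '@google-cloud/migration';

async function listSubtasksForTask(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  const request = {
    parent: client.migrationWorkflowPath('my-project', 'us', '1234'),
    filter: 'migration_task = "ab012"',  // task ID, as in the proto comment above
    pageSize: 50,
  };

  for await (const subtask of client.listMigrationSubtasksAsync(request)) {
    // processingError is a google.rpc.ErrorInfo; resourceErrorCount may exceed
    // the number of resourceErrorDetails entries when the list is truncated.
    console.log(subtask.name, subtask.state, subtask.processingError?.reason ?? '');
  }
}

listSubtasksForTask().catch(console.error);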
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Mapping between an input and output file to be translated in a subtask. +message TranslationFileMapping { + // The Cloud Storage path for a file to translation in a subtask. + string input_path = 1; + + // The Cloud Storage path to write back the corresponding input file to. + string output_path = 2; +} + +// The translation task config to capture necessary settings for a translation +// task and subtask. +message TranslationTaskDetails { + // The file encoding types. + enum FileEncoding { + // File encoding setting is not specified. + FILE_ENCODING_UNSPECIFIED = 0; + + // File encoding is UTF_8. + UTF_8 = 1; + + // File encoding is ISO_8859_1. + ISO_8859_1 = 2; + + // File encoding is US_ASCII. + US_ASCII = 3; + + // File encoding is UTF_16. + UTF_16 = 4; + + // File encoding is UTF_16LE. + UTF_16LE = 5; + + // File encoding is UTF_16BE. + UTF_16BE = 6; + } + + // The special token data type. + enum TokenType { + // Token type is not specified. + TOKEN_TYPE_UNSPECIFIED = 0; + + // Token type as string. + STRING = 1; + + // Token type as integer. + INT64 = 2; + + // Token type as numeric. + NUMERIC = 3; + + // Token type as boolean. + BOOL = 4; + + // Token type as float. + FLOAT64 = 5; + + // Token type as date. + DATE = 6; + + // Token type as timestamp. + TIMESTAMP = 7; + } + + // The language specific settings for the translation task. + oneof language_options { + // The Teradata SQL specific settings for the translation task. + TeradataOptions teradata_options = 10; + + // The BTEQ specific settings for the translation task. + BteqOptions bteq_options = 11; + } + + // The Cloud Storage path for translation input files. + string input_path = 1; + + // The Cloud Storage path for translation output files. + string output_path = 2; + + // Cloud Storage files to be processed for translation. + repeated TranslationFileMapping file_paths = 12; + + // The Cloud Storage path to DDL files as table schema to assist semantic + // translation. + string schema_path = 3; + + // The file encoding type. + FileEncoding file_encoding = 4; + + // The settings for SQL identifiers. + IdentifierSettings identifier_settings = 5; + + // The map capturing special tokens to be replaced during translation. The key + // is special token in string. The value is the token data type. This is used + // to translate SQL query template which contains special token as place + // holder. The special token makes a query invalid to parse. This map will be + // applied to annotate those special token with types to let parser understand + // how to parse them into proper structure with type information. + map special_token_map = 6; + + // The filter applied to translation details. + Filter filter = 7; + + // Specifies the exact name of the bigquery table ("dataset.table") to be used + // for surfacing raw translation errors. If the table does not exist, we will + // create it. If it already exists and the schema is the same, we will re-use. + // If the table exists and the schema is different, we will throw an error. 
+ string translation_exception_table = 13; +} + +// The filter applied to fields of translation details. +message Filter { + // The list of prefixes used to exclude processing for input files. + repeated string input_file_exclusion_prefixes = 1; +} + +// Settings related to SQL identifiers. +message IdentifierSettings { + // The identifier case type. + enum IdentifierCase { + // The identifier case is not specified. + IDENTIFIER_CASE_UNSPECIFIED = 0; + + // Identifiers' cases will be kept as the original cases. + ORIGINAL = 1; + + // Identifiers will be in upper cases. + UPPER = 2; + + // Identifiers will be in lower cases. + LOWER = 3; + } + + // The SQL identifier rewrite mode. + enum IdentifierRewriteMode { + // SQL Identifier rewrite mode is unspecified. + IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; + + // SQL identifiers won't be rewrite. + NONE = 1; + + // All SQL identifiers will be rewrite. + REWRITE_ALL = 2; + } + + // The setting to control output queries' identifier case. + IdentifierCase output_identifier_case = 1; + + // Specifies the rewrite mode for SQL identifiers. + IdentifierRewriteMode identifier_rewrite_mode = 2; +} + +// Teradata SQL specific translation task related settings. +message TeradataOptions { + +} + +// BTEQ translation task related settings. +message BteqOptions { + // Specifies the project and dataset in BigQuery that will be used for + // external table creation during the translation. + DatasetReference project_dataset = 1; + + // The Cloud Storage location to be used as the default path for files that + // are not otherwise specified in the file replacement map. + string default_path_uri = 2; + + // Maps the local paths that are used in BTEQ scripts (the keys) to the paths + // in Cloud Storage that should be used in their stead in the translation (the + // value). + map file_replacement_map = 3; +} + +// Reference to a BigQuery dataset. +message DatasetReference { + // A unique ID for this dataset, without the project name. The ID + // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). + // The maximum length is 1,024 characters. + string dataset_id = 1; + + // The ID of the project containing this dataset. + string project_id = 2; +} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..4004520 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js @@ -0,0 +1,64 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
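For orientation, here is a hedged sketch of the client-side shape of TranslationTaskDetails as defined above, written as a typed object literal. It assumes the package re-exports its generated protos namespace, as gapic-generated libraries typically do; every path, token, and setting is a placeholder, and the object would be assigned to a task's translationTaskDetails when creating a workflow.

// Sketch only: the rough client-side shape of a v2alpha TranslationTaskDetails.
// Assumes the package exports its generated `protos` namespace; all values are
// placeholders. This object would be set as `translationTaskDetails` on a
// MigrationTask passed to createMigrationWorkflow.
import type {protos} from '@google-cloud/migration';

const translationTaskDetails:
    protos.google.cloud.bigquery.migration.v2alpha.ITranslationTaskDetails = {
  inputPath: 'gs://my-bucket/teradata-sql/',
  outputPath: 'gs://my-bucket/translated-sql/',
  schemaPath: 'gs://my-bucket/ddl/',
  fileEncoding: 'UTF_8',
  identifierSettings: {
    outputIdentifierCase: 'UPPER',
    identifierRewriteMode: 'REWRITE_ALL',
  },
  // special_token_map: placeholder tokens in templated SQL, keyed to their types.
  specialTokenMap: {'${run_date}': 'DATE'},
  // Selects the Teradata branch of the language_options oneof.
  teradataOptions: {},
};

export {translationTaskDetails};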
** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. + */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..04adf0d --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js @@ -0,0 +1,59 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..8bf28e2 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js @@ -0,0 +1,63 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..bbc9e68 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js @@ -0,0 +1,63 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..61a50a0 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js @@ -0,0 +1,83 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..41d06f8 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js @@ -0,0 +1,77 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..236300c --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js @@ -0,0 +1,59 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json new file mode 100644 index 0000000..e8dd517 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2alpha", + "version": "v2alpha" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 56, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 69, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 55, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 75, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "service": { + 
"shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts new file mode 100644 index 0000000..288e629 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2alpha from './v2alpha'; +const MigrationServiceClient = v2alpha.MigrationServiceClient; +type MigrationServiceClient = v2alpha.MigrationServiceClient; +export {v2alpha, MigrationServiceClient}; +export default {v2alpha, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json new file mode 100644 index 0000000..f751ba9 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2alpha", + "libraryPackage": "@google-cloud/migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + 
"listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts new file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts new file mode 100644 index 0000000..50e1053 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts @@ -0,0 +1,1246 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; + +import { Transform } from 'stream'; +import { RequestType } from 'google-gax/build/src/apitypes'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2alpha/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
+ * @class + * @memberof v2alpha + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. 
+ * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2alpha.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback): + Promise|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. 
+ * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. 
The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. 
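Promise-style usage of the unary methods follows the same shape throughout: pass a request object with the documented fields and await the returned array. A short sketch chaining the calls above, with placeholder names in the documented format and the package name taken from the fixtures in this patch:

import {MigrationServiceClient} from '@google-cloud/migration';

async function manageWorkflow() {
  const client = new MigrationServiceClient();
  const name = 'projects/123/locations/us/workflows/1234';

  // DRAFT -> RUNNING; a no-op if the workflow is already RUNNING.
  await client.startMigrationWorkflow({name});

  // Fetch one subtask; readMask can optionally limit the returned fields.
  const [subtask] = await client.getMigrationSubtask({
    name: `${name}/subtasks/543`,
  });
  console.log(subtask.name);

  // Remove the workflow once it is no longer needed.
  await client.deleteMigrationWorkflow({name});
}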
+ * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = 
options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. 
+ * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. 
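The list methods come in three shapes, documented above: an auto-paginating call, an object-mode stream, and an async iterable. A sketch of each for ListMigrationWorkflows, assuming the placeholder parent from the examples and the package name used by the fixtures in this patch:

import {MigrationServiceClient} from '@google-cloud/migration';

async function listWorkflows() {
  const client = new MigrationServiceClient();
  const parent = 'projects/123/locations/us';

  // 1. Auto-pagination: resolves after all pages are fetched and merged.
  const [workflows] = await client.listMigrationWorkflows({parent});
  console.log(workflows.length);

  // 2. Stream: one MigrationWorkflow per 'data' event.
  client.listMigrationWorkflowsStream({parent})
    .on('data', workflow => console.log(workflow.name))
    .on('error', console.error);

  // 3. Async iteration: pages are fetched lazily, so iteration can stop early.
  for await (const workflow of client.listMigrationWorkflowsAsync({parent})) {
    console.log(workflow.name);
  }
}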
+ * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. 
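The filter parameter described above narrows results to the subtasks of a single task. A sketch of the auto-paginating form with placeholder names; the task ID "ab012" and the page size are the illustrative values from the documented example, not real identifiers:

import {MigrationServiceClient} from '@google-cloud/migration';

async function listSubtasksOfTask() {
  const client = new MigrationServiceClient();
  // Auto-pagination merges all pages into a single array.
  const [subtasks] = await client.listMigrationSubtasks({
    parent: 'projects/123/locations/us/workflows/1234',
    // Only the subtasks of one task; the ID comes from the example above.
    filter: 'migration_task = "ab012"',
    pageSize: 50,  // illustrative page size
  });
  for (const subtask of subtasks) {
    console.log(subtask.name);
  }
}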
+ * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. 
`migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. 
+ */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. 
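The path helpers in this section render and parse the resource names used by every RPC above. A small sketch, using the same placeholder components as the documented examples:

import {MigrationServiceClient} from '@google-cloud/migration';

const client = new MigrationServiceClient();

// Build fully-qualified names from their components...
const workflowName = client.migrationWorkflowPath('123', 'us', '1234');
const subtaskName = client.migrationSubtaskPath('123', 'us', '1234', '543');

// ...and recover individual components from a name.
const workflowId = client.matchWorkflowFromMigrationWorkflowName(workflowName);
const location = client.matchLocationFromMigrationSubtaskName(subtaskName);
console.log(workflowName, subtaskName, workflowId, location);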
+ */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close(): Promise { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json new file mode 100644 index 0000000..2184b83 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json @@ -0,0 +1,73 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2alpha.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationSubtasks": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json new file mode 100644 index 0000000..8e91e42 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json @@ -0,0 +1,8 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", + 
"../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" +] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..aa2c893 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..0afe940 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts new file mode 100644 index 0000000..8ec4522 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import { packNTest } from 'pack-n-play'; +import { readFileSync } from 'fs'; +import { describe, it } from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts new file mode 100644 index 0000000..4b80fbb --- /dev/null +++ b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts @@ -0,0 +1,1254 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import { describe, it } from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? 
sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2alpha.MigrationServiceClient', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + 
assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + 
client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 
'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + 
projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + 
client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: 
protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + 
request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + 
assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = 
client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2alpha/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2alpha/webpack.config.js 
b/owl-bot-staging/v2alpha/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2alpha/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; From 4b444aa8f500e70ea86fac1d889d531a5c492952 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 19 Aug 2022 20:10:43 +0000 Subject: [PATCH 04/13] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- owl-bot-staging/v2/.eslintignore | 7 - owl-bot-staging/v2/.eslintrc.json | 3 - owl-bot-staging/v2/.gitignore | 14 - owl-bot-staging/v2/.jsdoc.js | 55 - owl-bot-staging/v2/.mocharc.js | 33 - owl-bot-staging/v2/.prettierrc.js | 22 - owl-bot-staging/v2/README.md | 1 - owl-bot-staging/v2/linkinator.config.json | 16 - owl-bot-staging/v2/package.json | 64 - .../migration/v2/migration_entities.proto | 233 --- .../v2/migration_error_details.proto | 62 - .../migration/v2/migration_metrics.proto | 111 -- .../migration/v2/migration_service.proto | 245 ---- .../migration/v2/translation_config.proto | 257 ---- ...ation_service.create_migration_workflow.js | 64 - ...ation_service.delete_migration_workflow.js | 59 - ...migration_service.get_migration_subtask.js | 63 - ...igration_service.get_migration_workflow.js | 63 - ...gration_service.list_migration_subtasks.js | 83 -- ...ration_service.list_migration_workflows.js | 77 - ...ration_service.start_migration_workflow.js | 59 - ...ta.google.cloud.bigquery.migration.v2.json | 335 ----- owl-bot-staging/v2/src/index.ts | 25 - owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 -- owl-bot-staging/v2/src/v2/index.ts | 19 - .../v2/src/v2/migration_service_client.ts | 1246 ---------------- .../v2/migration_service_client_config.json | 71 - .../src/v2/migration_service_proto_list.json | 7 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - owl-bot-staging/v2/system-test/install.ts | 49 - 
.../v2/test/gapic_migration_service_v2.ts | 1254 ----------------- owl-bot-staging/v2/tsconfig.json | 19 - owl-bot-staging/v2/webpack.config.js | 64 - owl-bot-staging/v2alpha/.eslintignore | 7 - owl-bot-staging/v2alpha/.eslintrc.json | 3 - owl-bot-staging/v2alpha/.gitignore | 14 - owl-bot-staging/v2alpha/.jsdoc.js | 55 - owl-bot-staging/v2alpha/.mocharc.js | 33 - owl-bot-staging/v2alpha/.prettierrc.js | 22 - owl-bot-staging/v2alpha/README.md | 1 - .../v2alpha/linkinator.config.json | 16 - owl-bot-staging/v2alpha/package.json | 64 - .../migration/v2alpha/assessment_task.proto | 49 - .../v2alpha/migration_entities.proto | 244 ---- .../v2alpha/migration_error_details.proto | 62 - .../migration/v2alpha/migration_metrics.proto | 111 -- .../migration/v2alpha/migration_service.proto | 247 ---- .../migration/v2alpha/translation_task.proto | 207 --- ...ation_service.create_migration_workflow.js | 64 - ...ation_service.delete_migration_workflow.js | 59 - ...migration_service.get_migration_subtask.js | 63 - ...igration_service.get_migration_workflow.js | 63 - ...gration_service.list_migration_subtasks.js | 83 -- ...ration_service.list_migration_workflows.js | 77 - ...ration_service.start_migration_workflow.js | 59 - ...ogle.cloud.bigquery.migration.v2alpha.json | 335 ----- owl-bot-staging/v2alpha/src/index.ts | 25 - .../v2alpha/src/v2alpha/gapic_metadata.json | 101 -- owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 - .../src/v2alpha/migration_service_client.ts | 1246 ---------------- .../migration_service_client_config.json | 73 - .../v2alpha/migration_service_proto_list.json | 8 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - .../v2alpha/system-test/install.ts | 49 - .../test/gapic_migration_service_v2alpha.ts | 1254 ----------------- owl-bot-staging/v2alpha/tsconfig.json | 19 - owl-bot-staging/v2alpha/webpack.config.js | 64 - .../migration/v2alpha/migration_service.proto | 1 - 70 files changed, 9696 deletions(-) delete mode 100644 owl-bot-staging/v2/.eslintignore delete mode 100644 owl-bot-staging/v2/.eslintrc.json delete mode 100644 owl-bot-staging/v2/.gitignore delete mode 100644 owl-bot-staging/v2/.jsdoc.js delete mode 100644 owl-bot-staging/v2/.mocharc.js delete mode 100644 owl-bot-staging/v2/.prettierrc.js delete mode 100644 owl-bot-staging/v2/README.md delete mode 100644 owl-bot-staging/v2/linkinator.config.json delete mode 100644 owl-bot-staging/v2/package.json delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js delete mode 100644 
owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json delete mode 100644 owl-bot-staging/v2/src/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/src/v2/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2/system-test/install.ts delete mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts delete mode 100644 owl-bot-staging/v2/tsconfig.json delete mode 100644 owl-bot-staging/v2/webpack.config.js delete mode 100644 owl-bot-staging/v2alpha/.eslintignore delete mode 100644 owl-bot-staging/v2alpha/.eslintrc.json delete mode 100644 owl-bot-staging/v2alpha/.gitignore delete mode 100644 owl-bot-staging/v2alpha/.jsdoc.js delete mode 100644 owl-bot-staging/v2alpha/.mocharc.js delete mode 100644 owl-bot-staging/v2alpha/.prettierrc.js delete mode 100644 owl-bot-staging/v2alpha/README.md delete mode 100644 owl-bot-staging/v2alpha/linkinator.config.json delete mode 100644 owl-bot-staging/v2alpha/package.json delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json delete mode 100644 owl-bot-staging/v2alpha/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts delete mode 100644 
owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/system-test/install.ts delete mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts delete mode 100644 owl-bot-staging/v2alpha/tsconfig.json delete mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js deleted file mode 100644 index aabe555..0000000 --- a/owl-bot-staging/v2/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json deleted file mode 100644 index 6b38f7b..0000000 --- a/owl-bot-staging/v2/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.1.1" - }, - "devDependencies": { - "@types/mocha": "^9.1.0", - "@types/node": "^16.0.0", - "@types/sinon": "^10.0.8", - "c8": "^7.11.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.7", - "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^3.0.0", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^9.2.6", - "typescript": "^4.5.5", - "webpack": "^5.67.0", - "webpack-cli": "^4.9.1" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto deleted file mode 100644 index 7d77bae..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2/translation_config.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. - // The ID is server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. - FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Batch SQL Translation. 
- TranslationConfigDetails translation_config_details = 14; - } - - // Output only. Immutable. The unique identifier for the migration task. The - // ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be one of the supported task types: - // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, - // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, - // Translation_Snowflake2BQ, Translation_Netezza2BQ, - // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, - // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. - string type = 2; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 5 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID - // is server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 6 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Provides details to errors and issues encountered while - // processing the subtask. Presence of error details does not mean that the - // subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. 
Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto deleted file mode 100644 index 199e2db..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. 
- int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto deleted file mode 100644 index e52fead..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. 
For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately `+/-10^(+/-300)` and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto deleted file mode 100644 index 3c1a89e..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_entities.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Service to handle EDW migrations. 
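Editor's note on the metrics messages removed above: when a worker reports a subtask metric, the TimeSeries is created with exactly one Point, and for a GAUGE metric only the interval's end time needs to be set. The following is a minimal sketch in the style of the generated JavaScript samples; the metric name and values are invented for illustration, and the camelCase field names are assumed from the usual proto-to-JavaScript mapping rather than taken from a shipped sample.

'use strict';

// A single GAUGE-style data point shaped after the TimeSeries, Point,
// TimeInterval and TypedValue messages above. Name and numbers are
// placeholders only.
const timeSeries = {
  metric: 'example/translated_statements', // hypothetical metric name
  valueType: 'INT64',
  metricKind: 'GAUGE',
  points: [
    {
      // For GAUGE metrics the start time may be omitted; only endTime is set.
      interval: {endTime: {seconds: Math.floor(Date.now() / 1000)}},
      // TypedValue is a oneof; populate exactly one *Value field, matching
      // the declared valueType.
      value: {int64Value: 42},
    },
  ],
};

console.log(JSON.stringify(timeSeries, null, 2));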
-service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflows. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. - rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved.
- google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from a previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtask. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. - // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration subtasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from a previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page.
- // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // task in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto deleted file mode 100644 index 994140d..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationConfigProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The translation config to capture necessary settings for a translation task -// and subtask. -message TranslationConfigDetails { - // The chosen path where the source for input files will be found. - oneof source_location { - // The Cloud Storage path for a directory of files to translate in a task. - string gcs_source_path = 1; - } - - // The chosen path where the destination for output files will be found. - oneof target_location { - // The Cloud Storage path to write back the corresponding input files to. - string gcs_target_path = 2; - } - - // The dialect of the input files. - Dialect source_dialect = 3; - - // The target dialect for the engine to translate the input to. - Dialect target_dialect = 4; - - // The mapping of full SQL object names from their current state to the - // desired output. - oneof output_name_mapping { - // The mapping of objects to their desired output names in list form. - ObjectNameMappingList name_mapping_list = 5; - } - - // The default source environment values for the translation. - SourceEnv source_env = 6; -} - -// The possible dialect options for translation. -message Dialect { - // The possible dialect options that this message represents.
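Editor's note tying the pieces together: a translation task's translation_config_details pairs Cloud Storage input and output paths with a source and target Dialect (the dialect wrapper messages appear in the oneof just below), and such a task is what the MigrationWorkflow sent to CreateMigrationWorkflow carries. The sketch below follows the style of the generated JavaScript samples; the bucket paths and default database are placeholders, and the camelCase field names are assumed from the usual proto-to-JavaScript mapping rather than taken from a shipped sample.

'use strict';

// Translation settings shaped after TranslationConfigDetails above and the
// Dialect oneof that follows. All values are placeholders.
const translationConfigDetails = {
  gcsSourcePath: 'gs://example-bucket/input',      // directory of files to translate
  gcsTargetPath: 'gs://example-bucket/output',     // where translated files are written
  sourceDialect: {mysqlDialect: {}},               // one member of the Dialect oneof
  targetDialect: {bigqueryDialect: {}},
  sourceEnv: {defaultDatabase: 'example_project'}, // optional SourceEnv defaults
};

// A task of one of the supported types listed on MigrationTask above; a task
// like this would be placed in the MigrationWorkflow passed to
// CreateMigrationWorkflow.
const task = {
  type: 'Translation_MySQL2BQ',
  translationConfigDetails,
};

console.log(JSON.stringify(task, null, 2));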
- oneof dialect_value { - // The BigQuery dialect - BigQueryDialect bigquery_dialect = 1; - - // The HiveQL dialect - HiveQLDialect hiveql_dialect = 2; - - // The Redshift dialect - RedshiftDialect redshift_dialect = 3; - - // The Teradata dialect - TeradataDialect teradata_dialect = 4; - - // The Oracle dialect - OracleDialect oracle_dialect = 5; - - // The SparkSQL dialect - SparkSQLDialect sparksql_dialect = 6; - - // The Snowflake dialect - SnowflakeDialect snowflake_dialect = 7; - - // The Netezza dialect - NetezzaDialect netezza_dialect = 8; - - // The Azure Synapse dialect - AzureSynapseDialect azure_synapse_dialect = 9; - - // The Vertica dialect - VerticaDialect vertica_dialect = 10; - - // The SQL Server dialect - SQLServerDialect sql_server_dialect = 11; - - // The Postgresql dialect - PostgresqlDialect postgresql_dialect = 12; - - // The Presto dialect - PrestoDialect presto_dialect = 13; - - // The MySQL dialect - MySQLDialect mysql_dialect = 14; - } -} - -// The dialect definition for BigQuery. -message BigQueryDialect {} - -// The dialect definition for HiveQL. -message HiveQLDialect {} - -// The dialect definition for Redshift. -message RedshiftDialect {} - -// The dialect definition for Teradata. -message TeradataDialect { - // The sub-dialect options for Teradata. - enum Mode { - // Unspecified mode. - MODE_UNSPECIFIED = 0; - - // Teradata SQL mode. - SQL = 1; - - // BTEQ mode (which includes SQL). - BTEQ = 2; - } - - // Which Teradata sub-dialect mode the user specifies. - Mode mode = 1; -} - -// The dialect definition for Oracle. -message OracleDialect {} - -// The dialect definition for SparkSQL. -message SparkSQLDialect {} - -// The dialect definition for Snowflake. -message SnowflakeDialect {} - -// The dialect definition for Netezza. -message NetezzaDialect {} - -// The dialect definition for Azure Synapse. -message AzureSynapseDialect {} - -// The dialect definition for Vertica. -message VerticaDialect {} - -// The dialect definition for SQL Server. -message SQLServerDialect {} - -// The dialect definition for Postgresql. -message PostgresqlDialect {} - -// The dialect definition for Presto. -message PrestoDialect {} - -// The dialect definition for MySQL. -message MySQLDialect {} - -// Represents a map of name mappings using a list of key:value proto messages of -// existing name to desired output name. -message ObjectNameMappingList { - // The elements of the object name map. - repeated ObjectNameMapping name_map = 1; -} - -// Represents a key-value pair of NameMappingKey to NameMappingValue to -// represent the mapping of SQL names from the input value to desired output. -message ObjectNameMapping { - // The name of the object in source that is being mapped. - NameMappingKey source = 1; - - // The desired target name of the object that is being mapped. - NameMappingValue target = 2; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the source data warehouse. -message NameMappingKey { - // The type of the object that is being mapped. - enum Type { - // Unspecified name mapping type. - TYPE_UNSPECIFIED = 0; - - // The object being mapped is a database. - DATABASE = 1; - - // The object being mapped is a schema. - SCHEMA = 2; - - // The object being mapped is a relation. - RELATION = 3; - - // The object being mapped is an attribute. - ATTRIBUTE = 4; - - // The object being mapped is a relation alias. - RELATION_ALIAS = 5; - - // The object being mapped is an attribute alias.
- ATTRIBUTE_ALIAS = 6; - - // The object being mapped is a function. - FUNCTION = 7; - } - - // The type of object that is being mapped. - Type type = 1; - - // The database name (BigQuery project ID equivalent in the source data - // warehouse). - string database = 2; - - // The schema name (BigQuery dataset equivalent in the source data warehouse). - string schema = 3; - - // The relation name (BigQuery table or view equivalent in the source data - // warehouse). - string relation = 4; - - // The attribute name (BigQuery column equivalent in the source data - // warehouse). - string attribute = 5; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the target data warehouse. -message NameMappingValue { - // The database name (BigQuery project ID equivalent in the target data - // warehouse). - string database = 1; - - // The schema name (BigQuery dataset equivalent in the target data warehouse). - string schema = 2; - - // The relation name (BigQuery table or view equivalent in the target data - // warehouse). - string relation = 3; - - // The attribute name (BigQuery column equivalent in the target data - // warehouse). - string attribute = 4; -} - -// Represents the default source environment values for the translation. -message SourceEnv { - // The default database name to fully qualify SQL objects when their database - // name is missing. - string default_database = 1; - - // The schema search path. When SQL objects are missing schema name, - // translation engine will search through this list to find the value. - repeated string schema_search_path = 2; -} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js deleted file mode 100644 index 9aed895..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. 
- */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js deleted file mode 100644 index aeda293..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js deleted file mode 100644 index df6f7d2..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js deleted file mode 100644 index 72f7565..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js deleted file mode 100644 index 95e5833..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js deleted file mode 100644 index 63402d4..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js deleted file mode 100644 index 76c614c..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json deleted file mode 100644 index c5cd62c..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2", - "version": "v2" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 56, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 55, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 69, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 55, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 75, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "service": { - "shortName": "MigrationService", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts deleted file mode 100644 index 35a8fd9..0000000 --- a/owl-bot-staging/v2/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2 from './v2'; -const MigrationServiceClient = v2.MigrationServiceClient; -type MigrationServiceClient = v2.MigrationServiceClient; -export {v2, MigrationServiceClient}; -export default {v2, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json deleted file mode 100644 index e0aa12f..0000000 --- a/owl-bot-staging/v2/src/v2/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2", - "libraryPackage": "@google-cloud/migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/index.ts b/owl-bot-staging/v2/src/v2/index.ts 
deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2/src/v2/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts deleted file mode 100644 index 20f6e71..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client.ts +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; - -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; - -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2 - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
- if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. - this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. 
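// A minimal construction sketch for the constructor options documented above. The
// import path '@google-cloud/migration' is only the placeholder package name used by
// the generated system-test fixtures later in this diff; substitute the name your
// package is actually published under. With Application Default Credentials available,
// an empty options object is enough.
import {MigrationServiceClient} from '@google-cloud/migration';

const client = new MigrationServiceClient({
  projectId: 'my-project',   // hypothetical project ID; auto-detected when omitted
  fallback: 'rest',          // optional: use HTTP/1.1 REST instead of gRPC
});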
- * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. 
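// A usage sketch for createMigrationWorkflow, assuming `client` is a constructed
// MigrationServiceClient and the parent/display name below are placeholders. The
// promise resolves to an array whose first element is the created MigrationWorkflow.
async function createWorkflow() {
  const [workflow] = await client.createMigrationWorkflow({
    parent: 'projects/my-project/locations/us',   // hypothetical parent
    migrationWorkflow: {
      displayName: 'my-workflow',   // a real request would also populate the `tasks` map
    },
  });
  console.log(workflow.name);
}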
- * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
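// As the overloads above show, each generated method also accepts a Node-style
// callback instead of returning a promise. A sketch with a hypothetical workflow name:
client.deleteMigrationWorkflow(
  {name: 'projects/my-project/locations/us/workflows/1234'},
  (err, response) => {
    if (err) {
      console.error('deleteMigrationWorkflow failed:', err);
      return;
    }
    console.log('workflow deleted', response);   // response is google.protobuf.Empty
  });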
- * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
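// startMigrationWorkflow is a plain unary call; per the description above it only
// transitions a DRAFT workflow to RUNNING and is a no-op if the workflow already runs.
// Sketch with a hypothetical workflow name:
client.startMigrationWorkflow({name: 'projects/my-project/locations/us/workflows/1234'})
  .then(() => console.log('workflow started'))
  .catch(console.error);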
- * @example include:samples/generated/v2/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
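// getMigrationSubtask accepts an optional google.protobuf.FieldMask; the sketch below
// requests only selected fields (the field paths are assumptions based on the
// MigrationSubtask message, and the resource name is hypothetical).
async function getSubtask() {
  const [subtask] = await client.getMigrationSubtask({
    name: 'projects/my-project/locations/us/workflows/1234/subtasks/543',
    readMask: {paths: ['name', 'state']},   // assumed field paths
  });
  console.log(subtask.name, subtask.state);
}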
- * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
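// With the default auto-pagination described above, listMigrationWorkflows resolves to
// an array holding every workflow across all pages (parent is hypothetical):
async function listAllWorkflows() {
  const [workflows] = await client.listMigrationWorkflows({
    parent: 'projects/my-project/locations/us',
    pageSize: 50,   // per-page size hint; all pages are still fetched and merged
  });
  for (const wf of workflows) {
    console.log(wf.name);
  }
}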
The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
- * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
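// The async-iterable variant fetches pages lazily, so iteration can be stopped early
// without pulling every page (parent is hypothetical):
async function iterateWorkflows() {
  for await (const workflow of client.listMigrationWorkflowsAsync({
    parent: 'projects/my-project/locations/us',
  })) {
    console.log(workflow.name);
    break;   // stopping the loop stops further page requests
  }
}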
- */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. 
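// listMigrationSubtasks supports the `filter` string described above to narrow results
// to the subtasks of a single task (the parent and task ID are hypothetical):
async function listSubtasksForTask() {
  const [subtasks] = await client.listMigrationSubtasks({
    parent: 'projects/my-project/locations/us/workflows/1234',
    filter: 'migration_task = "ab012"',
  });
  console.log(`found ${subtasks.length} subtasks`);
}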
- * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. 
The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. - */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. 
- */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
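// The path-template helpers build and parse the resource names used throughout the
// client, and close() tears down the underlying channel when you are done. A sketch
// with hypothetical identifiers:
const workflowName = client.migrationWorkflowPath('my-project', 'us', '1234');
console.log(workflowName);   // projects/my-project/locations/us/workflows/1234
console.log(client.matchWorkflowFromMigrationWorkflowName(workflowName));   // 1234
client.close().then(() => console.log('client closed'));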
- */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json deleted file mode 100644 index 5832815..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client_config.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "ListMigrationSubtasks": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json deleted file mode 100644 index 57df7ab..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" -] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js deleted file mode 100644 index aa2c893..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
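// The JSON above is the default retry/timeout table for each RPC; a single call can
// override it through gax CallOptions, for example a longer timeout with retries
// disabled (the workflow name is hypothetical):
client.getMigrationWorkflow(
  {name: 'projects/my-project/locations/us/workflows/1234'},
  {timeout: 120000, retry: null},   // per-call override of the defaults above
).then(([workflow]) => console.log(workflow.name));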
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 0afe940..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts deleted file mode 100644 index 8ec4522..0000000 --- a/owl-bot-staging/v2/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import { packNTest } from 'pack-n-play'; -import { readFileSync } from 'fs'; -import { describe, it } from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts deleted file mode 100644 index eb05cd6..0000000 --- a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts +++ /dev/null @@ -1,1254 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall<ResponseType>(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2.MigrationServiceClient', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials:
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - 
await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = 
''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - 
credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - 
assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - 
client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - 
assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', 
private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - 
}); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); - 
const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - 
expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - 
.getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2alpha/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2alpha/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2alpha/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js deleted file mode 100644 index aabe555..0000000 --- a/owl-bot-staging/v2alpha/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2alpha/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2alpha/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2alpha/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2alpha/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json deleted file mode 100644 index 6b38f7b..0000000 --- a/owl-bot-staging/v2alpha/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.1.1" - }, - "devDependencies": { - "@types/mocha": "^9.1.0", - "@types/node": "^16.0.0", - "@types/sinon": "^10.0.8", - "c8": "^7.11.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.7", - "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^3.0.0", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^9.2.6", - "typescript": "^4.5.5", - "webpack": "^5.67.0", - "webpack-cli": "^4.9.1" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto deleted file mode 100644 index 0c6ea13..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "AssessmentTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Assessment task config. -message AssessmentTaskDetails { - // Required. The Cloud Storage path for assessment input files. - string input_path = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The BigQuery dataset for output. - string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. An optional Cloud Storage path to write the query logs (which is - // then used as an input path on the translation task) - string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) - // from which the input data is extracted. - string data_source = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// Details for an assessment task orchestration result. -message AssessmentOrchestrationResultDetails { - // Optional. The version used for the output table schemas. - string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto deleted file mode 100644 index 50d4c75..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. 
- FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Assessment. - AssessmentTaskDetails assessment_task_details = 12; - - // Task configuration for Batch/Offline SQL Translation. - TranslationTaskDetails translation_task_details = 13; - } - - // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be a supported task type. - string type = 2; - - // DEPRECATED! Use one of the task_details below. - // The details of the task. The type URL must be one of the supported task - // details messages and correspond to the Task's type. - google.protobuf.Any details = 3; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; - - // Output only. Additional information about the orchestration. - MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. 
Provides details to errors and issues encountered while processing the - // subtask. Presence of error details does not mean that the subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} - -// Additional information from the orchestrator when it is done with the -// task orchestration. -message MigrationTaskOrchestrationResult { - // Details specific to the task type. - oneof details { - // Details specific to assessment task types. - AssessmentOrchestrationResultDetails assessment_details = 1; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto deleted file mode 100644 index 89dac5e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. 
The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. - int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto deleted file mode 100644 index ce60dd2..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. 
If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately +/-9.2x10^18. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately +/-10^(+/-300) and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto deleted file mode 100644 index 9a184a1..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto +++ /dev/null @@ -1,247 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
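The metrics proto deleted above models subtask metrics as a TimeSeries of Points, each Point pairing a TimeInterval with a TypedValue, and a series created for a subtask carrying exactly one point whose kind defaults to GAUGE. A minimal sketch of such an object, with a placeholder metric name and the string enum spellings taken from the proto:

const exampleTimeSeries = {
  metric: 'rows_processed', // placeholder; metrics unknown to the service are auto-created
  valueType: 'INT64',
  metricKind: 'GAUGE', // the default kind when the descriptor is auto-created
  points: [
    {
      // For GAUGE metrics the start time may be omitted or must equal the end time.
      interval: {endTime: {seconds: Math.floor(Date.now() / 1000)}},
      value: {int64Value: 12345},
    },
  ],
};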
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Service to handle EDW migrations. -service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2alpha/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. 
- rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. 
- // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto deleted file mode 100644 index bf4b27e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
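ListMigrationSubtasksRequest, deleted above, exposes read_mask, page_size, page_token, and a filter such as `migration_task = "ab012"` for narrowing results to the subtasks of one task. The sketch below combines that filter with the listMigrationSubtasksAsync paging helper that the generated samples use; the workflow name and task ID are the placeholder values from the proto comments.

'use strict';

const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;

async function listSubtasksForTask() {
  const migrationClient = new MigrationServiceClient();

  // Iterate all subtasks of task "ab012" in the given workflow, page by page.
  const iterable = migrationClient.listMigrationSubtasksAsync({
    parent: 'projects/123/locations/us/workflows/1234', // placeholder from the proto example
    filter: 'migration_task = "ab012"', // task ID, not the key in the workflow's task map
    pageSize: 100,
  });

  for await (const subtask of iterable) {
    console.log(`${subtask.name}: ${subtask.state}`);
  }
}

listSubtasksForTask();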
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Mapping between an input and output file to be translated in a subtask. -message TranslationFileMapping { - // The Cloud Storage path for a file to translation in a subtask. - string input_path = 1; - - // The Cloud Storage path to write back the corresponding input file to. - string output_path = 2; -} - -// The translation task config to capture necessary settings for a translation -// task and subtask. -message TranslationTaskDetails { - // The file encoding types. - enum FileEncoding { - // File encoding setting is not specified. - FILE_ENCODING_UNSPECIFIED = 0; - - // File encoding is UTF_8. - UTF_8 = 1; - - // File encoding is ISO_8859_1. - ISO_8859_1 = 2; - - // File encoding is US_ASCII. - US_ASCII = 3; - - // File encoding is UTF_16. - UTF_16 = 4; - - // File encoding is UTF_16LE. - UTF_16LE = 5; - - // File encoding is UTF_16BE. - UTF_16BE = 6; - } - - // The special token data type. - enum TokenType { - // Token type is not specified. - TOKEN_TYPE_UNSPECIFIED = 0; - - // Token type as string. - STRING = 1; - - // Token type as integer. - INT64 = 2; - - // Token type as numeric. - NUMERIC = 3; - - // Token type as boolean. - BOOL = 4; - - // Token type as float. - FLOAT64 = 5; - - // Token type as date. - DATE = 6; - - // Token type as timestamp. - TIMESTAMP = 7; - } - - // The language specific settings for the translation task. - oneof language_options { - // The Teradata SQL specific settings for the translation task. - TeradataOptions teradata_options = 10; - - // The BTEQ specific settings for the translation task. - BteqOptions bteq_options = 11; - } - - // The Cloud Storage path for translation input files. - string input_path = 1; - - // The Cloud Storage path for translation output files. - string output_path = 2; - - // Cloud Storage files to be processed for translation. - repeated TranslationFileMapping file_paths = 12; - - // The Cloud Storage path to DDL files as table schema to assist semantic - // translation. - string schema_path = 3; - - // The file encoding type. - FileEncoding file_encoding = 4; - - // The settings for SQL identifiers. - IdentifierSettings identifier_settings = 5; - - // The map capturing special tokens to be replaced during translation. The key - // is special token in string. The value is the token data type. This is used - // to translate SQL query template which contains special token as place - // holder. The special token makes a query invalid to parse. This map will be - // applied to annotate those special token with types to let parser understand - // how to parse them into proper structure with type information. - map special_token_map = 6; - - // The filter applied to translation details. - Filter filter = 7; - - // Specifies the exact name of the bigquery table ("dataset.table") to be used - // for surfacing raw translation errors. If the table does not exist, we will - // create it. If it already exists and the schema is the same, we will re-use. - // If the table exists and the schema is different, we will throw an error. 
- string translation_exception_table = 13; -} - -// The filter applied to fields of translation details. -message Filter { - // The list of prefixes used to exclude processing for input files. - repeated string input_file_exclusion_prefixes = 1; -} - -// Settings related to SQL identifiers. -message IdentifierSettings { - // The identifier case type. - enum IdentifierCase { - // The identifier case is not specified. - IDENTIFIER_CASE_UNSPECIFIED = 0; - - // Identifiers' cases will be kept as the original cases. - ORIGINAL = 1; - - // Identifiers will be in upper cases. - UPPER = 2; - - // Identifiers will be in lower cases. - LOWER = 3; - } - - // The SQL identifier rewrite mode. - enum IdentifierRewriteMode { - // SQL Identifier rewrite mode is unspecified. - IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; - - // SQL identifiers won't be rewrite. - NONE = 1; - - // All SQL identifiers will be rewrite. - REWRITE_ALL = 2; - } - - // The setting to control output queries' identifier case. - IdentifierCase output_identifier_case = 1; - - // Specifies the rewrite mode for SQL identifiers. - IdentifierRewriteMode identifier_rewrite_mode = 2; -} - -// Teradata SQL specific translation task related settings. -message TeradataOptions { - -} - -// BTEQ translation task related settings. -message BteqOptions { - // Specifies the project and dataset in BigQuery that will be used for - // external table creation during the translation. - DatasetReference project_dataset = 1; - - // The Cloud Storage location to be used as the default path for files that - // are not otherwise specified in the file replacement map. - string default_path_uri = 2; - - // Maps the local paths that are used in BTEQ scripts (the keys) to the paths - // in Cloud Storage that should be used in their stead in the translation (the - // value). - map file_replacement_map = 3; -} - -// Reference to a BigQuery dataset. -message DatasetReference { - // A unique ID for this dataset, without the project name. The ID - // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). - // The maximum length is 1,024 characters. - string dataset_id = 1; - - // The ID of the project containing this dataset. - string project_id = 2; -} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js deleted file mode 100644 index 4004520..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
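The translation_task.proto deleted above describes the batch SQL translation config: input, output, and schema paths, file encoding, identifier settings, a special-token map, an exclusion filter, and an optional exception table. Purely as an illustration of those fields, with placeholder bucket paths and table name and the enum spellings taken from the proto:

const exampleTranslationTaskDetails = {
  teradataOptions: {}, // selects the Teradata branch of the language_options oneof
  inputPath: 'gs://example-bucket/sql-input/*', // placeholder
  outputPath: 'gs://example-bucket/sql-output/', // placeholder
  schemaPath: 'gs://example-bucket/ddl/*', // placeholder DDL files used as schema hints
  fileEncoding: 'UTF_8',
  identifierSettings: {
    outputIdentifierCase: 'UPPER',
    identifierRewriteMode: 'REWRITE_ALL',
  },
  specialTokenMap: {macro_run_date: 'DATE'}, // placeholder token name mapped to a TokenType
  filter: {inputFileExclusionPrefixes: ['archive/']},
  translationExceptionTable: 'example_dataset.translation_errors', // placeholder "dataset.table"
};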
** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. - */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js deleted file mode 100644 index 04adf0d..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js deleted file mode 100644 index 8bf28e2..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js deleted file mode 100644 index bbc9e68..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js deleted file mode 100644 index 61a50a0..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js deleted file mode 100644 index 41d06f8..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js deleted file mode 100644 index 236300c..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json deleted file mode 100644 index e8dd517..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2alpha", - "version": "v2alpha" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 56, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 55, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 69, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 55, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 75, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "service": { - 
"shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts deleted file mode 100644 index 288e629..0000000 --- a/owl-bot-staging/v2alpha/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2alpha from './v2alpha'; -const MigrationServiceClient = v2alpha.MigrationServiceClient; -type MigrationServiceClient = v2alpha.MigrationServiceClient; -export {v2alpha, MigrationServiceClient}; -export default {v2alpha, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json deleted file mode 100644 index f751ba9..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2alpha", - "libraryPackage": "@google-cloud/migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - 
"listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts deleted file mode 100644 index 50e1053..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; - -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2alpha/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; - -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2alpha - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
- if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. - this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. 
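The constructor options documented above (credentials, keyFilename, projectId, apiEndpoint, clientConfig, fallback) are all optional, and initialize() runs lazily on the first RPC. A minimal sketch that passes a couple of options and awaits initialization up front; the key path and values are placeholders, not taken from this diff:

const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;

async function makeClient() {
  const client = new MigrationServiceClient({
    // Placeholder values for illustration only.
    keyFilename: '/path/to/service-account.json',
    fallback: 'rest', // use HTTP/1.1 REST instead of gRPC, per the option docs above
  });
  // Optional: resolve auth and the service stub before the first call.
  await client.initialize();
  return client;
}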
- * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2alpha.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. 
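A short sketch of createMigrationWorkflow as documented above: only `parent` and `migrationWorkflow` are required, and the promise resolves to a tuple whose first element is the created MigrationWorkflow. The workflow body shown here (just a display name) is an illustrative placeholder; real workflows would also carry the tasks defined in migration_entities.proto:

const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;
const client = new MigrationServiceClient();

async function createWorkflow() {
  const [workflow] = await client.createMigrationWorkflow({
    parent: 'projects/my-project/locations/us',            // placeholder project/location
    migrationWorkflow: {displayName: 'example workflow'},  // illustrative body only
  });
  console.log(workflow.name);  // e.g. projects/.../locations/us/workflows/1234
  return workflow;
}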
- * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. 
The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. 
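getMigrationSubtask, documented above, takes the fully qualified subtask name plus an optional readMask (a FieldMask of fields to return). A sketch that builds the name with the client's own path helper; the IDs and the field-mask paths are assumptions for illustration, not taken from this diff:

const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;
const client = new MigrationServiceClient();

async function getSubtask() {
  // Placeholder project, location, workflow, and subtask IDs.
  const name = client.migrationSubtaskPath('my-project', 'us', '1234', '543');
  const [subtask] = await client.getMigrationSubtask({
    name,
    readMask: {paths: ['name', 'state']},  // illustrative field names
  });
  console.log(subtask);
}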
- * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = 
options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. 
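listMigrationWorkflowsStream, defined just above, returns a Node.js object stream that handles pagination internally and emits one MigrationWorkflow per 'data' event. A brief sketch with a placeholder parent:

const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;
const client = new MigrationServiceClient();

client.listMigrationWorkflowsStream({
    parent: 'projects/my-project/locations/us',  // placeholder
    pageSize: 10,
  })
  .on('data', workflow => console.log(workflow.name))
  .on('error', console.error)
  .on('end', () => console.log('done'));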
- * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. 
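listMigrationWorkflowsAsync, shown above, wraps the same pagination in an async iterable, so iteration can stop early without fetching further pages. A sketch with a placeholder parent; the fields logged are assumed for illustration:

const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;
const client = new MigrationServiceClient();

async function peekAtWorkflows() {
  const iterable = client.listMigrationWorkflowsAsync({
    parent: 'projects/my-project/locations/us',  // placeholder
  });
  for await (const workflow of iterable) {
    console.log(workflow.name);
    break;  // stopping early avoids requesting the remaining pages
  }
}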
- * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. 
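listMigrationSubtasks supports the same pagination plus a filter; per the comment above, `migration_task = "ab012"` restricts results to the subtasks of a single task ID. A sketch of the plain, auto-paginating call with placeholder names:

const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;
const client = new MigrationServiceClient();

async function listSubtasksOfTask() {
  const [subtasks] = await client.listMigrationSubtasks({
    parent: 'projects/my-project/locations/us/workflows/1234',  // placeholder workflow
    filter: 'migration_task = "ab012"',  // task-ID filter, as documented above
    pageSize: 50,
  });
  // Auto-pagination merges every page into one array (which may use more quota).
  console.log(subtasks.length);
}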
- * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. 
`migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. 
- */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. 
- */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. - */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json deleted file mode 100644 index 2184b83..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2alpha.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationSubtasks": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json deleted file mode 100644 index 8e91e42..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", - 
"../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" -] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js deleted file mode 100644 index aa2c893..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 0afe940..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts deleted file mode 100644 index 8ec4522..0000000 --- a/owl-bot-staging/v2alpha/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import { packNTest } from 'pack-n-play'; -import { readFileSync } from 'fs'; -import { describe, it } from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts deleted file mode 100644 index 4b80fbb..0000000 --- a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts +++ /dev/null @@ -1,1254 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? 
sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2alpha.MigrationServiceClient', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - 
client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - 
client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - 
assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - 
client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 
'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - 
projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - 
client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: 
protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - 
request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - 
assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - 
credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = 
client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2alpha/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2alpha/webpack.config.js 
b/owl-bot-staging/v2alpha/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2alpha/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto index b5e9014..9a184a1 100644 --- a/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto +++ b/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto @@ -25,7 +25,6 @@ import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; -import "google/rpc/error_details.proto"; option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; From 49bea408c0e69fc8898f39a217f401826d991429 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Sat, 20 Aug 2022 01:13:31 +0000 Subject: [PATCH 05/13] fix: better support for fallback mode PiperOrigin-RevId: 468790263 Source-Link: https://github.com/googleapis/googleapis/commit/873ab456273d105245df0fb82a6c17a814553b80 Source-Link: https://github.com/googleapis/googleapis-gen/commit/cb6f37aeff2a3472e40a7bbace8c67d75e24bee5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2I2ZjM3YWVmZjJhMzQ3MmU0MGE3YmJhY2U4YzY3ZDc1ZTI0YmVlNSJ9 --- owl-bot-staging/v2/.eslintignore | 7 + owl-bot-staging/v2/.eslintrc.json | 3 + owl-bot-staging/v2/.gitignore | 14 + owl-bot-staging/v2/.jsdoc.js | 55 + owl-bot-staging/v2/.mocharc.js | 33 + owl-bot-staging/v2/.prettierrc.js | 22 + owl-bot-staging/v2/README.md | 1 + owl-bot-staging/v2/linkinator.config.json | 16 + owl-bot-staging/v2/package.json | 64 + .../migration/v2/migration_entities.proto | 233 +++ .../v2/migration_error_details.proto | 62 + .../migration/v2/migration_metrics.proto | 111 ++ .../migration/v2/migration_service.proto | 245 ++++ 
.../migration/v2/translation_config.proto | 257 ++++ ...ation_service.create_migration_workflow.js | 67 + ...ation_service.delete_migration_workflow.js | 62 + ...migration_service.get_migration_subtask.js | 66 + ...igration_service.get_migration_workflow.js | 66 + ...gration_service.list_migration_subtasks.js | 86 ++ ...ration_service.list_migration_workflows.js | 80 ++ ...ration_service.start_migration_workflow.js | 62 + ...ta.google.cloud.bigquery.migration.v2.json | 335 +++++ owl-bot-staging/v2/src/index.ts | 25 + owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 ++ owl-bot-staging/v2/src/v2/index.ts | 19 + .../v2/src/v2/migration_service_client.ts | 1246 ++++++++++++++++ .../v2/migration_service_client_config.json | 71 + .../src/v2/migration_service_proto_list.json | 7 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + owl-bot-staging/v2/system-test/install.ts | 49 + .../v2/test/gapic_migration_service_v2.ts | 1256 +++++++++++++++++ owl-bot-staging/v2/tsconfig.json | 19 + owl-bot-staging/v2/webpack.config.js | 64 + owl-bot-staging/v2alpha/.eslintignore | 7 + owl-bot-staging/v2alpha/.eslintrc.json | 3 + owl-bot-staging/v2alpha/.gitignore | 14 + owl-bot-staging/v2alpha/.jsdoc.js | 55 + owl-bot-staging/v2alpha/.mocharc.js | 33 + owl-bot-staging/v2alpha/.prettierrc.js | 22 + owl-bot-staging/v2alpha/README.md | 1 + .../v2alpha/linkinator.config.json | 16 + owl-bot-staging/v2alpha/package.json | 64 + .../migration/v2alpha/assessment_task.proto | 49 + .../v2alpha/migration_entities.proto | 244 ++++ .../v2alpha/migration_error_details.proto | 62 + .../migration/v2alpha/migration_metrics.proto | 111 ++ .../migration/v2alpha/migration_service.proto | 247 ++++ .../migration/v2alpha/translation_task.proto | 207 +++ ...ation_service.create_migration_workflow.js | 67 + ...ation_service.delete_migration_workflow.js | 62 + ...migration_service.get_migration_subtask.js | 66 + ...igration_service.get_migration_workflow.js | 66 + ...gration_service.list_migration_subtasks.js | 86 ++ ...ration_service.list_migration_workflows.js | 80 ++ ...ration_service.start_migration_workflow.js | 62 + ...ogle.cloud.bigquery.migration.v2alpha.json | 335 +++++ owl-bot-staging/v2alpha/src/index.ts | 25 + .../v2alpha/src/v2alpha/gapic_metadata.json | 101 ++ owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 + .../src/v2alpha/migration_service_client.ts | 1246 ++++++++++++++++ .../migration_service_client_config.json | 73 + .../v2alpha/migration_service_proto_list.json | 8 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + .../v2alpha/system-test/install.ts | 49 + .../test/gapic_migration_service_v2alpha.ts | 1256 +++++++++++++++++ owl-bot-staging/v2alpha/tsconfig.json | 19 + owl-bot-staging/v2alpha/webpack.config.js | 64 + 69 files changed, 9741 insertions(+) create mode 100644 owl-bot-staging/v2/.eslintignore create mode 100644 owl-bot-staging/v2/.eslintrc.json create mode 100644 owl-bot-staging/v2/.gitignore create mode 100644 owl-bot-staging/v2/.jsdoc.js create mode 100644 owl-bot-staging/v2/.mocharc.js create mode 100644 owl-bot-staging/v2/.prettierrc.js create mode 100644 owl-bot-staging/v2/README.md create mode 100644 owl-bot-staging/v2/linkinator.config.json create mode 100644 owl-bot-staging/v2/package.json create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto create mode 100644 
owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json create mode 100644 owl-bot-staging/v2/src/index.ts create mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/src/v2/index.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json create mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2/system-test/install.ts create mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts create mode 100644 owl-bot-staging/v2/tsconfig.json create mode 100644 owl-bot-staging/v2/webpack.config.js create mode 100644 owl-bot-staging/v2alpha/.eslintignore create mode 100644 owl-bot-staging/v2alpha/.eslintrc.json create mode 100644 owl-bot-staging/v2alpha/.gitignore create mode 100644 owl-bot-staging/v2alpha/.jsdoc.js create mode 100644 owl-bot-staging/v2alpha/.mocharc.js create mode 100644 owl-bot-staging/v2alpha/.prettierrc.js create mode 100644 owl-bot-staging/v2alpha/README.md create mode 100644 owl-bot-staging/v2alpha/linkinator.config.json create mode 100644 owl-bot-staging/v2alpha/package.json create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js create mode 100644 
owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json create mode 100644 owl-bot-staging/v2alpha/src/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2alpha/system-test/install.ts create mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts create mode 100644 owl-bot-staging/v2alpha/tsconfig.json create mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js new file mode 100644 index 0000000..aabe555 --- /dev/null +++ b/owl-bot-staging/v2/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json new file mode 100644 index 0000000..8172b89 --- /dev/null +++ b/owl-bot-staging/v2/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.2.0" + }, + "devDependencies": { + "@types/mocha": "^9.1.1", + "@types/node": "^16.11.50", + "@types/sinon": "^10.0.13", + "c8": "^7.12.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.11", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.2", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^8.4.0", + "typescript": "^4.7.4", + "webpack": "^4.46.0", + "webpack-cli": "^4.10.0" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto new file mode 100644 index 0000000..7d77bae --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto @@ -0,0 +1,233 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2/translation_config.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. + // The ID is server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map<string, MigrationTask> tasks = 2; + + // Output only. The status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // The task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. + FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Batch SQL Translation.
+ TranslationConfigDetails translation_config_details = 14; + } + + // Output only. Immutable. The unique identifier for the migration task. The + // ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be one of the supported task types: + // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, + // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, + // Translation_Snowflake2BQ, Translation_Netezza2BQ, + // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, + // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. + string type = 2; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID + // is server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 6 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Provides details to errors and issues encountered while + // processing the subtask. Presence of error details does not mean that the + // subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number or resources with errors. 
Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto new file mode 100644 index 0000000..199e2db --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that where encountered when +// processing a subtask. +message ErrorDetail { + // Optional. The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. 
+ int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto new file mode 100644 index 0000000..e52fead --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. 
For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. + oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately `+/-10^(+/-300)` and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto new file mode 100644 index 0000000..3c1a89e --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto @@ -0,0 +1,245 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_entities.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Service to handle EDW migrations. 
+service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. + rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. + rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. 
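As a rough usage sketch (hand-written for this review, not generated output), the workflow RPCs defined just above surface on the Node.js client that this patch adds. The package name `@google-cloud/migration`, the camelCase method names, and the `[response]` destructuring follow the generated samples and tests elsewhere in this diff; the project and location values are placeholders.

```javascript
// Sketch: create a draft workflow, start it (DRAFT -> RUNNING), then read it back.
const {MigrationServiceClient} = require('@google-cloud/migration').v2;

async function runWorkflow() {
  const client = new MigrationServiceClient();
  const parent = 'projects/my-project/locations/us'; // placeholder values

  // CreateMigrationWorkflow returns the created MigrationWorkflow resource.
  const [workflow] = await client.createMigrationWorkflow({
    parent,
    migrationWorkflow: {displayName: 'example workflow'},
  });

  // StartMigrationWorkflow transitions the workflow from DRAFT to RUNNING.
  await client.startMigrationWorkflow({name: workflow.name});

  // GetMigrationWorkflow reads the current state back.
  const [current] = await client.getMigrationWorkflow({name: workflow.name});
  console.log(current.state);
}

runWorkflow().catch(console.error);
```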
+ google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. + // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. 
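The `page_size`/`page_token`/`next_page_token` fields above describe the wire-level paging contract; in practice the generated client wraps it in an async iterator (the `listMigrationSubtasksAsync` method exercised by the unit tests later in this patch). A small hedged sketch, with placeholder resource names and the camelCase request fields assumed by the generated client:

```javascript
// Sketch: iterate all subtasks of a workflow without handling page tokens by hand.
const {MigrationServiceClient} = require('@google-cloud/migration').v2;

async function listSubtasks() {
  const client = new MigrationServiceClient();
  const iterable = client.listMigrationSubtasksAsync({
    parent: 'projects/my-project/locations/us/workflows/1234', // placeholder
    pageSize: 50, // maps to the page_size field; page tokens are handled internally
  });
  for await (const subtask of iterable) {
    console.log(subtask.name, subtask.state);
  }
}

listSubtasks().catch(console.error);
```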
+ // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto new file mode 100644 index 0000000..994140d --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto @@ -0,0 +1,257 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationConfigProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The translation config to capture necessary settings for a translation task +// and subtask. +message TranslationConfigDetails { + // The chosen path where the source for input files will be found. + oneof source_location { + // The Cloud Storage path for a directory of files to translate in a task. + string gcs_source_path = 1; + } + + // The chosen path where the destination for output files will be found. + oneof target_location { + // The Cloud Storage path to write back the corresponding input files to. + string gcs_target_path = 2; + } + + // The dialect of the input files. + Dialect source_dialect = 3; + + // The target dialect for the engine to translate the input to. + Dialect target_dialect = 4; + + // The mapping of full SQL object names from their current state to the + // desired output. + oneof output_name_mapping { + // The mapping of objects to their desired output names in list form. + ObjectNameMappingList name_mapping_list = 5; + } + + // The default source environment values for the translation. + SourceEnv source_env = 6; +} + +// The possible dialect options for translation. +message Dialect { + // The possible dialect options that this message represents. 
+ oneof dialect_value { + // The BigQuery dialect + BigQueryDialect bigquery_dialect = 1; + + // The HiveQL dialect + HiveQLDialect hiveql_dialect = 2; + + // The Redshift dialect + RedshiftDialect redshift_dialect = 3; + + // The Teradata dialect + TeradataDialect teradata_dialect = 4; + + // The Oracle dialect + OracleDialect oracle_dialect = 5; + + // The SparkSQL dialect + SparkSQLDialect sparksql_dialect = 6; + + // The Snowflake dialect + SnowflakeDialect snowflake_dialect = 7; + + // The Netezza dialect + NetezzaDialect netezza_dialect = 8; + + // The Azure Synapse dialect + AzureSynapseDialect azure_synapse_dialect = 9; + + // The Vertica dialect + VerticaDialect vertica_dialect = 10; + + // The SQL Server dialect + SQLServerDialect sql_server_dialect = 11; + + // The Postgresql dialect + PostgresqlDialect postgresql_dialect = 12; + + // The Presto dialect + PrestoDialect presto_dialect = 13; + + // The MySQL dialect + MySQLDialect mysql_dialect = 14; + } +} + +// The dialect definition for BigQuery. +message BigQueryDialect {} + +// The dialect definition for HiveQL. +message HiveQLDialect {} + +// The dialect definition for Redshift. +message RedshiftDialect {} + +// The dialect definition for Teradata. +message TeradataDialect { + // The sub-dialect options for Teradata. + enum Mode { + // Unspecified mode. + MODE_UNSPECIFIED = 0; + + // Teradata SQL mode. + SQL = 1; + + // BTEQ mode (which includes SQL). + BTEQ = 2; + } + + // Which Teradata sub-dialect mode the user specifies. + Mode mode = 1; +} + +// The dialect definition for Oracle. +message OracleDialect {} + +// The dialect definition for SparkSQL. +message SparkSQLDialect {} + +// The dialect definition for Snowflake. +message SnowflakeDialect {} + +// The dialect definition for Netezza. +message NetezzaDialect {} + +// The dialect definition for Azure Synapse. +message AzureSynapseDialect {} + +// The dialect definition for Vertica. +message VerticaDialect {} + +// The dialect definition for SQL Server. +message SQLServerDialect {} + +// The dialect definition for Postgresql. +message PostgresqlDialect {} + +// The dialect definition for Presto. +message PrestoDialect {} + +// The dialect definition for MySQL. +message MySQLDialect {} + +// Represents a map of name mappings using a list of key:value proto messages of +// existing name to desired output name. +message ObjectNameMappingList { + // The elements of the object name map. + repeated ObjectNameMapping name_map = 1; +} + +// Represents a key-value pair of NameMappingKey to NameMappingValue to +// represent the mapping of SQL names from the input value to desired output. +message ObjectNameMapping { + // The name of the object in source that is being mapped. + NameMappingKey source = 1; + + // The desired target name of the object that is being mapped. + NameMappingValue target = 2; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the source data warehouse. +message NameMappingKey { + // The type of the object that is being mapped. + enum Type { + // Unspecified name mapping type. + TYPE_UNSPECIFIED = 0; + + // The object being mapped is a database. + DATABASE = 1; + + // The object being mapped is a schema. + SCHEMA = 2; + + // The object being mapped is a relation. + RELATION = 3; + + // The object being mapped is an attribute. + ATTRIBUTE = 4; + + // The object being mapped is a relation alias. + RELATION_ALIAS = 5; + + // The object being mapped is a an attribute alias. 
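Pulling together the `TranslationConfigDetails` fields and the `Dialect` oneof above with the task types listed in `migration_entities.proto`: a hand-written sketch (not generated output) of what a MySQL-to-BigQuery batch translation workflow might look like as a request body. The camelCase field names assume the generated client's usual proto mapping; the bucket paths, task key, and IDs are placeholders.

```javascript
// Sketch: a CreateMigrationWorkflow request whose single task is a
// MySQL -> BigQuery batch SQL translation.
const request = {
  parent: 'projects/my-project/locations/us', // placeholder
  migrationWorkflow: {
    displayName: 'mysql-to-bq-translation',
    tasks: {
      // The map key is only a convenient handle for the task within the workflow.
      translate: {
        type: 'Translation_MySQL2BQ', // one of the task types listed in migration_entities.proto
        translationConfigDetails: {
          gcsSourcePath: 'gs://my-bucket/input',  // directory of source SQL files
          gcsTargetPath: 'gs://my-bucket/output', // where translated files are written back
          sourceDialect: {mysqlDialect: {}},
          targetDialect: {bigqueryDialect: {}},
        },
      },
    },
  },
};
// `request` would then be passed to client.createMigrationWorkflow(request),
// as in the generated sample later in this patch.
```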
+ ATTRIBUTE_ALIAS = 6; + + // The object being mapped is a function. + FUNCTION = 7; + } + + // The type of object that is being mapped. + Type type = 1; + + // The database name (BigQuery project ID equivalent in the source data + // warehouse). + string database = 2; + + // The schema name (BigQuery dataset equivalent in the source data warehouse). + string schema = 3; + + // The relation name (BigQuery table or view equivalent in the source data + // warehouse). + string relation = 4; + + // The attribute name (BigQuery column equivalent in the source data + // warehouse). + string attribute = 5; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the target data warehouse. +message NameMappingValue { + // The database name (BigQuery project ID equivalent in the target data + // warehouse). + string database = 1; + + // The schema name (BigQuery dataset equivalent in the target data warehouse). + string schema = 2; + + // The relation name (BigQuery table or view equivalent in the target data + // warehouse). + string relation = 3; + + // The attribute name (BigQuery column equivalent in the target data + // warehouse). + string attribute = 4; +} + +// Represents the default source environment values for the translation. +message SourceEnv { + // The default database name to fully qualify SQL objects when their database + // name is missing. + string default_database = 1; + + // The schema search path. When SQL objects are missing schema name, + // translation engine will search through this list to find the value. + repeated string schema_search_path = 2; +} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..817b25b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js @@ -0,0 +1,67 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. 
+ */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..b3907a6 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..949eb5a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..39e79e3 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..4a63df1 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js @@ -0,0 +1,86 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * task in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map).
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..66dc66f --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js @@ -0,0 +1,80 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..34fb75b --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json new file mode 100644 index 0000000..81ec8bb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2", + "version": "v2" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 72, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 78, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "service": { + "shortName": "MigrationService", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts new file mode 100644 index 0000000..35a8fd9 --- /dev/null +++ b/owl-bot-staging/v2/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2 from './v2'; +const MigrationServiceClient = v2.MigrationServiceClient; +type MigrationServiceClient = v2.MigrationServiceClient; +export {v2, MigrationServiceClient}; +export default {v2, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json new file mode 100644 index 0000000..e0aa12f --- /dev/null +++ b/owl-bot-staging/v2/src/v2/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2", + "libraryPackage": "@google-cloud/migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/index.ts b/owl-bot-staging/v2/src/v2/index.ts new 
file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts new file mode 100644 index 0000000..d99d7a2 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client.ts @@ -0,0 +1,1246 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; + +import {Transform} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. + * @class + * @memberof v2 + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. 
+ * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. 
+ if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? 
+ (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise<string>; + getProjectId(callback: Callback<string, undefined, undefined>): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback<string, undefined, undefined>): + Promise<string>|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}.
+ * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. 
+ * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2/migration_service.get_migration_subtask.js
+ * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async
+ */
+ getMigrationSubtask(
+ request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest,
+ options?: CallOptions):
+ Promise<[
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask,
+ protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined
+ ]>;
+ getMigrationSubtask(
+ request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest,
+ options: CallOptions,
+ callback: Callback<
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask,
+ protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined,
+ {}|null|undefined>): void;
+ getMigrationSubtask(
+ request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest,
+ callback: Callback<
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask,
+ protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined,
+ {}|null|undefined>): void;
+ getMigrationSubtask(
+ request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest,
+ optionsOrCallback?: CallOptions|Callback<
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask,
+ protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined,
+ {}|null|undefined>,
+ callback?: Callback<
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask,
+ protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined,
+ {}|null|undefined>):
+ Promise<[
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask,
+ protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined
+ ]>|void {
+ request = request || {};
+ let options: CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ }
+ else {
+ options = optionsOrCallback as CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers[
+ 'x-goog-request-params'
+ ] = gax.routingHeader.fromParams({
+ 'name': request.name || '',
+ });
+ this.initialize();
+ return this.innerApiCalls.getMigrationSubtask(request, options, callback);
+ }
+
+ /**
+ * Lists previously created migration workflows.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.parent
+ * Required. The project and location of the migration workflows to list.
+ * Example: `projects/123/locations/us`
+ * @param {google.protobuf.FieldMask} request.readMask
+ * The list of fields to be retrieved.
+ * @param {number} request.pageSize
+ * The maximum number of migration workflows to return. The service may return
+ * fewer than this number.
+ * @param {string} request.pageToken
+ * A page token, received from previous `ListMigrationWorkflows` call.
+ * Provide this to retrieve the subsequent page.
+ *
+ * When paginating, all other parameters provided to `ListMigrationWorkflows`
+ * must match the call that provided the page token.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}.
+ * The client library will perform auto-pagination by default: it will call the API as many
+ * times as needed and will merge results from all the pages into this array.
+ * Note that it can affect your quota.
+ * We recommend using `listMigrationWorkflowsAsync()`
+ * method described below for async iteration which you can stop as needed.
+ * Please see the
+ * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination)
+ * for more details and examples.
+ */
+ listMigrationWorkflows(
+ request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest,
+ options?: CallOptions):
+ Promise<[
+ protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[],
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse
+ ]>;
+ listMigrationWorkflows(
+ request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest,
+ options: CallOptions,
+ callback: PaginationCallback<
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined,
+ protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void;
+ listMigrationWorkflows(
+ request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest,
+ callback: PaginationCallback<
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined,
+ protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void;
+ listMigrationWorkflows(
+ request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest,
+ optionsOrCallback?: CallOptions|PaginationCallback<
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined,
+ protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>,
+ callback?: PaginationCallback<
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined,
+ protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>):
+ Promise<[
+ protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[],
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse
+ ]>|void {
+ request = request || {};
+ let options: CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ }
+ else {
+ options = optionsOrCallback as CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers[
+ 'x-goog-request-params'
+ ] = gax.routingHeader.fromParams({
+ 'parent': request.parent || '',
+ });
+ this.initialize();
+ return this.innerApiCalls.listMigrationWorkflows(request, options, callback);
+ }
+
+/**
+ * Equivalent to `listMigrationWorkflows`, but returns a NodeJS Stream object.
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.parent
+ * Required.
The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
+ * When you iterate the returned iterable, each element will be an object representing
+ * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page,
+ * so you can stop the iteration when you don't need more results.
+ * Please see the
+ * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination)
+ * for more details and examples.
+ * @example include:samples/generated/v2/migration_service.list_migration_workflows.js
+ * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async
+ */
+ listMigrationWorkflowsAsync(
+ request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest,
+ options?: CallOptions):
+ AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>{
+ request = request || {};
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers[
+ 'x-goog-request-params'
+ ] = gax.routingHeader.fromParams({
+ 'parent': request.parent || '',
+ });
+ const defaultCallSettings = this._defaults['listMigrationWorkflows'];
+ const callSettings = defaultCallSettings.merge(options);
+ this.initialize();
+ return this.descriptors.page.listMigrationWorkflows.asyncIterate(
+ this.innerApiCalls['listMigrationWorkflows'] as GaxCall,
+ request as {},
+ callSettings
+ ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>;
+ }
+ /**
+ * Lists previously created migration subtasks.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.parent
+ * Required. The migration task of the subtasks to list.
+ * Example: `projects/123/locations/us/workflows/1234`
+ * @param {google.protobuf.FieldMask} [request.readMask]
+ * Optional. The list of fields to be retrieved.
+ * @param {number} [request.pageSize]
+ * Optional. The maximum number of migration tasks to return. The service may return
+ * fewer than this number.
+ * @param {string} [request.pageToken]
+ * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+ * Provide this to retrieve the subsequent page.
+ *
+ * When paginating, all other parameters provided to `ListMigrationSubtasks`
+ * must match the call that provided the page token.
+ * @param {string} [request.filter]
+ * Optional. The filter to apply. This can be used to get the subtasks of a specific
+ * task in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+ * task ID (not the name in the named map).
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}.
+ * The client library will perform auto-pagination by default: it will call the API as many
+ * times as needed and will merge results from all the pages into this array.
+ * Note that it can affect your quota.
+ * We recommend using `listMigrationSubtasksAsync()`
+ * method described below for async iteration which you can stop as needed.
+ * Please see the
+ * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination)
+ * for more details and examples.
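+ *
+ * A minimal call sketch (assuming an already-constructed `MigrationServiceClient` named
+ * `client`; the parent reuses the example value above, and all pages are merged into the
+ * returned array):
+ *
+ *     const [subtasks] = await client.listMigrationSubtasks({
+ *       parent: 'projects/123/locations/us/workflows/1234',
+ *     });
+ *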
+ */
+ listMigrationSubtasks(
+ request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest,
+ options?: CallOptions):
+ Promise<[
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[],
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse
+ ]>;
+ listMigrationSubtasks(
+ request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest,
+ options: CallOptions,
+ callback: PaginationCallback<
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined,
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void;
+ listMigrationSubtasks(
+ request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest,
+ callback: PaginationCallback<
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined,
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void;
+ listMigrationSubtasks(
+ request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest,
+ optionsOrCallback?: CallOptions|PaginationCallback<
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined,
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>,
+ callback?: PaginationCallback<
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined,
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>):
+ Promise<[
+ protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[],
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null,
+ protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse
+ ]>|void {
+ request = request || {};
+ let options: CallOptions;
+ if (typeof optionsOrCallback === 'function' && callback === undefined) {
+ callback = optionsOrCallback;
+ options = {};
+ }
+ else {
+ options = optionsOrCallback as CallOptions;
+ }
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers[
+ 'x-goog-request-params'
+ ] = gax.routingHeader.fromParams({
+ 'parent': request.parent || '',
+ });
+ this.initialize();
+ return this.innerApiCalls.listMigrationSubtasks(request, options, callback);
+ }
+
+/**
+ * Equivalent to `listMigrationSubtasks`, but returns a NodeJS Stream object.
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.parent
+ * Required. The migration task of the subtasks to list.
+ * Example: `projects/123/locations/us/workflows/1234`
+ * @param {google.protobuf.FieldMask} [request.readMask]
+ * Optional. The list of fields to be retrieved.
+ * @param {number} [request.pageSize]
+ * Optional. The maximum number of migration tasks to return. The service may return
+ * fewer than this number.
+ * @param {string} [request.pageToken]
+ * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+ * Provide this to retrieve the subsequent page.
+ *
+ * When paginating, all other parameters provided to `ListMigrationSubtasks`
+ * must match the call that provided the page token.
+ * @param {string} [request.filter]
+ * Optional. The filter to apply. This can be used to get the subtasks of a specific
+ * task in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+ * task ID (not the name in the named map).
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Stream}
+ * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event.
+ * The client library will perform auto-pagination by default: it will call the API as many
+ * times as needed. Note that it can affect your quota.
+ * We recommend using `listMigrationSubtasksAsync()`
+ * method described below for async iteration which you can stop as needed.
+ * Please see the
+ * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination)
+ * for more details and examples.
+ */
+ listMigrationSubtasksStream(
+ request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest,
+ options?: CallOptions):
+ Transform{
+ request = request || {};
+ options = options || {};
+ options.otherArgs = options.otherArgs || {};
+ options.otherArgs.headers = options.otherArgs.headers || {};
+ options.otherArgs.headers[
+ 'x-goog-request-params'
+ ] = gax.routingHeader.fromParams({
+ 'parent': request.parent || '',
+ });
+ const defaultCallSettings = this._defaults['listMigrationSubtasks'];
+ const callSettings = defaultCallSettings.merge(options);
+ this.initialize();
+ return this.descriptors.page.listMigrationSubtasks.createStream(
+ this.innerApiCalls.listMigrationSubtasks as GaxCall,
+ request,
+ callSettings
+ );
+ }
+
+/**
+ * Equivalent to `listMigrationSubtasks`, but returns an iterable object.
+ *
+ * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand.
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.parent
+ * Required. The migration task of the subtasks to list.
+ * Example: `projects/123/locations/us/workflows/1234`
+ * @param {google.protobuf.FieldMask} [request.readMask]
+ * Optional. The list of fields to be retrieved.
+ * @param {number} [request.pageSize]
+ * Optional. The maximum number of migration tasks to return. The service may return
+ * fewer than this number.
+ * @param {string} [request.pageToken]
+ * Optional. A page token, received from previous `ListMigrationSubtasks` call.
+ * Provide this to retrieve the subsequent page.
+ *
+ * When paginating, all other parameters provided to `ListMigrationSubtasks`
+ * must match the call that provided the page token.
+ * @param {string} [request.filter]
+ * Optional. The filter to apply. This can be used to get the subtasks of a specific
+ * task in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+ * task ID (not the name in the named map).
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Object}
+ * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols).
+ * When you iterate the returned iterable, each element will be an object representing
+ * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}.
The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. + */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. 
+ */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
+ */ + close(): Promise { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json new file mode 100644 index 0000000..5832815 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client_config.json @@ -0,0 +1,71 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListMigrationSubtasks": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json new file mode 100644 index 0000000..57df7ab --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json @@ -0,0 +1,7 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" +] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..aa2c893 --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..0afe940 --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts new file mode 100644 index 0000000..557a575 --- /dev/null +++ b/owl-bot-staging/v2/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import {packNTest} from 'pack-n-play'; +import {readFileSync} from 'fs'; +import {describe, it} from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts new file mode 100644 index 0000000..061c58c --- /dev/null +++ b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2.MigrationServiceClient', () => { + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new 
migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + 
client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', 
async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + 
assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + 
const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = 
generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, 
expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + 
resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = 
stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + 
.getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + 
assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
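+// This webpack setup bundles the compiled client into ./migration-service.js for browser use: TypeScript sources go through ts-loader, while Node-only dependencies (the gRPC transports, retry-request, the proxy agents, and gtoken) are replaced with null-loader.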
+ +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2alpha/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js new file mode 100644 index 0000000..aabe555 --- /dev/null +++ b/owl-bot-staging/v2alpha/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2alpha/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2alpha/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2alpha/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2alpha/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json new file mode 100644 index 0000000..8172b89 --- /dev/null +++ b/owl-bot-staging/v2alpha/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.2.0" + }, + "devDependencies": { + "@types/mocha": "^9.1.1", + "@types/node": "^16.11.50", + "@types/sinon": "^10.0.13", + "c8": "^7.12.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.11", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.2", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^8.4.0", + "typescript": "^4.7.4", + "webpack": "^4.46.0", + "webpack-cli": "^4.10.0" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto new file mode 100644 index 0000000..0c6ea13 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto @@ -0,0 +1,49 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
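+// Defines the configuration messages for EDW assessment tasks: the Cloud Storage input path, the BigQuery output dataset, the optional query logs path, and the source data warehouse type, plus the orchestration result details.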
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "AssessmentTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Assessment task config. +message AssessmentTaskDetails { + // Required. The Cloud Storage path for assessment input files. + string input_path = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The BigQuery dataset for output. + string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. An optional Cloud Storage path to write the query logs (which is + // then used as an input path on the translation task) + string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) + // from which the input data is extracted. + string data_source = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// Details for an assessment task orchestration result. +message AssessmentOrchestrationResultDetails { + // Optional. The version used for the output table schemas. + string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto new file mode 100644 index 0000000..50d4c75 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto @@ -0,0 +1,244 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map tasks = 2; + + // Output only. That status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // Tha task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. 
+ FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Assessment. + AssessmentTaskDetails assessment_task_details = 12; + + // Task configuration for Batch/Offline SQL Translation. + TranslationTaskDetails translation_task_details = 13; + } + + // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be a supported task type. + string type = 2; + + // DEPRECATED! Use one of the task_details below. + // The details of the task. The type URL must be one of the supported task + // details messages and correspond to the Task's type. + google.protobuf.Any details = 3; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; + + // Output only. Additional information about the orchestration. + MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned, it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only.
Provides details about errors and issues encountered while processing the + // subtask. Presence of error details does not mean that the subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number of resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} + +// Additional information from the orchestrator when it is done with the +// task orchestration. +message MigrationTaskOrchestrationResult { + // Details specific to the task type. + oneof details { + // Details specific to assessment task types. + AssessmentOrchestrationResultDetails assessment_details = 1; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto new file mode 100644 index 0000000..89dac5e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that were encountered when +// processing a subtask. +message ErrorDetail { + // Optional.
The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no column information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto new file mode 100644 index 0000000..ce60dd2 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric.
If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. + oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately +/-9.2x10^18. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately +/-10^(+/-300) and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto new file mode 100644 index 0000000..9a184a1 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto @@ -0,0 +1,247 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
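+// Defines the MigrationService RPCs (create, get, list, delete, and start migration workflows; get and list migration subtasks) and their request and response messages.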
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2alpha/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. + rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. 
+ rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. 
+ // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto new file mode 100644 index 0000000..bf4b27e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto @@ -0,0 +1,207 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
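For reference, a minimal sketch of how the `filter`, `page_size`, and `page_token` fields of `ListMigrationSubtasksRequest` above might be exercised from the generated Node.js client; the parent path and the task ID `ab012` are placeholder values taken from the field comments, not part of the generated patch:

'use strict';

// Illustrative only: filter subtasks by task ID and let the async iterator
// handle page_token paging, per the request fields defined above.
const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;

async function listSubtasksForTask() {
  const migrationClient = new MigrationServiceClient();
  const request = {
    parent: 'projects/123/locations/us/workflows/1234', // placeholder workflow name
    filter: 'migration_task = "ab012"',                 // placeholder task ID
    pageSize: 50,
  };
  const iterable = await migrationClient.listMigrationSubtasksAsync(request);
  for await (const subtask of iterable) {
    console.log(subtask.name, subtask.state);
  }
}

listSubtasksForTask().catch(err => {
  console.error(err.message);
  process.exitCode = 1;
});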
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Mapping between an input and output file to be translated in a subtask. +message TranslationFileMapping { + // The Cloud Storage path for a file to be translated in a subtask. + string input_path = 1; + + // The Cloud Storage path to write back the corresponding input file to. + string output_path = 2; +} + +// The translation task config to capture necessary settings for a translation +// task and subtask. +message TranslationTaskDetails { + // The file encoding types. + enum FileEncoding { + // File encoding setting is not specified. + FILE_ENCODING_UNSPECIFIED = 0; + + // File encoding is UTF_8. + UTF_8 = 1; + + // File encoding is ISO_8859_1. + ISO_8859_1 = 2; + + // File encoding is US_ASCII. + US_ASCII = 3; + + // File encoding is UTF_16. + UTF_16 = 4; + + // File encoding is UTF_16LE. + UTF_16LE = 5; + + // File encoding is UTF_16BE. + UTF_16BE = 6; + } + + // The special token data type. + enum TokenType { + // Token type is not specified. + TOKEN_TYPE_UNSPECIFIED = 0; + + // Token type as string. + STRING = 1; + + // Token type as integer. + INT64 = 2; + + // Token type as numeric. + NUMERIC = 3; + + // Token type as boolean. + BOOL = 4; + + // Token type as float. + FLOAT64 = 5; + + // Token type as date. + DATE = 6; + + // Token type as timestamp. + TIMESTAMP = 7; + } + + // The language specific settings for the translation task. + oneof language_options { + // The Teradata SQL specific settings for the translation task. + TeradataOptions teradata_options = 10; + + // The BTEQ specific settings for the translation task. + BteqOptions bteq_options = 11; + } + + // The Cloud Storage path for translation input files. + string input_path = 1; + + // The Cloud Storage path for translation output files. + string output_path = 2; + + // Cloud Storage files to be processed for translation. + repeated TranslationFileMapping file_paths = 12; + + // The Cloud Storage path to DDL files as table schema to assist semantic + // translation. + string schema_path = 3; + + // The file encoding type. + FileEncoding file_encoding = 4; + + // The settings for SQL identifiers. + IdentifierSettings identifier_settings = 5; + + // The map capturing special tokens to be replaced during translation. The key + // is the special token as a string. The value is the token's data type. This is + // used to translate SQL query templates that contain special tokens as + // placeholders, which would otherwise make a query invalid to parse. This map + // annotates those special tokens with types so that the parser knows how to + // parse them into the proper structure with type information. + map<string, TokenType> special_token_map = 6; + + // The filter applied to translation details. + Filter filter = 7; + + // Specifies the exact name of the BigQuery table ("dataset.table") to be used + // for surfacing raw translation errors. If the table does not exist, we will + // create it. If it already exists and the schema is the same, we will re-use it. + // If the table exists and the schema is different, we will throw an error.
+ string translation_exception_table = 13; +} + +// The filter applied to fields of translation details. +message Filter { + // The list of prefixes used to exclude processing for input files. + repeated string input_file_exclusion_prefixes = 1; +} + +// Settings related to SQL identifiers. +message IdentifierSettings { + // The identifier case type. + enum IdentifierCase { + // The identifier case is not specified. + IDENTIFIER_CASE_UNSPECIFIED = 0; + + // Identifiers' cases will be kept as the original cases. + ORIGINAL = 1; + + // Identifiers will be in upper case. + UPPER = 2; + + // Identifiers will be in lower case. + LOWER = 3; + } + + // The SQL identifier rewrite mode. + enum IdentifierRewriteMode { + // SQL identifier rewrite mode is unspecified. + IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; + + // SQL identifiers won't be rewritten. + NONE = 1; + + // All SQL identifiers will be rewritten. + REWRITE_ALL = 2; + } + + // The setting to control output queries' identifier case. + IdentifierCase output_identifier_case = 1; + + // Specifies the rewrite mode for SQL identifiers. + IdentifierRewriteMode identifier_rewrite_mode = 2; +} + +// Teradata SQL specific translation task related settings. +message TeradataOptions { + +} + +// BTEQ translation task related settings. +message BteqOptions { + // Specifies the project and dataset in BigQuery that will be used for + // external table creation during the translation. + DatasetReference project_dataset = 1; + + // The Cloud Storage location to be used as the default path for files that + // are not otherwise specified in the file replacement map. + string default_path_uri = 2; + + // Maps the local paths that are used in BTEQ scripts (the keys) to the paths + // in Cloud Storage that should be used in their stead in the translation (the + // value). + map<string, string> file_replacement_map = 3; +} + +// Reference to a BigQuery dataset. +message DatasetReference { + // A unique ID for this dataset, without the project name. The ID + // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). + // The maximum length is 1,024 characters. + string dataset_id = 1; + + // The ID of the project containing this dataset. + string project_id = 2; +} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..3c13323 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js @@ -0,0 +1,67 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten.
** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. + */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..1637924 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..be42b52 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..f3ef8c0 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..4b1e2a7 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js @@ -0,0 +1,86 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..0b71d06 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js @@ -0,0 +1,80 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..bcdcbbf --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json new file mode 100644 index 0000000..c5336c2 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2alpha", + "version": "v2alpha" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 72, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 78, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "service": { + 
"shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts new file mode 100644 index 0000000..288e629 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2alpha from './v2alpha'; +const MigrationServiceClient = v2alpha.MigrationServiceClient; +type MigrationServiceClient = v2alpha.MigrationServiceClient; +export {v2alpha, MigrationServiceClient}; +export default {v2alpha, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json new file mode 100644 index 0000000..f751ba9 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2alpha", + "libraryPackage": "@google-cloud/migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + 
"listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts new file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts new file mode 100644 index 0000000..0fbbe62 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts @@ -0,0 +1,1246 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; + +import {Transform} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2alpha/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
+ * @class + * @memberof v2alpha + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. 
+ * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2alpha.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise<string>; + getProjectId(callback: Callback<string, undefined, undefined>): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback<string, undefined, undefined>): + Promise<string>|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required.
The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. 
+ * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. 
The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. 
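+ * A hedged sketch of manual paging with this token, assuming auto-pagination is turned off through the `autoPaginate` call option (`request` is a placeholder for your own request object): + * + *   let [workflows, nextRequest] = await client.listMigrationWorkflows(request, {autoPaginate: false}); + *   while (nextRequest) { // nextRequest already carries the next pageToken + *     [workflows, nextRequest] = await client.listMigrationWorkflows(nextRequest, {autoPaginate: false}); + *   } + *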
+ * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = 
options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. 
+ * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}.
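+ * For instance, assuming `parent` holds an existing workflow name such as `projects/123/locations/us/workflows/1234`, a call like `const [subtasks] = await client.listMigrationSubtasks({parent});` resolves with that array as its first element.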
+ * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. 
+ * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. 
`migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string.
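+ * + * For example, with placeholder IDs, a call such as `migrationSubtaskPath('my-project', 'us', '1234', '543')` is expected to render `projects/my-project/locations/us/workflows/1234/subtasks/543`.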
+ */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. 
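+ * + * For example, given the placeholder name `projects/my-project/locations/us/workflows/1234`, this is expected to return `1234`.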
+ */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close(): Promise<void> { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json new file mode 100644 index 0000000..2184b83 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json @@ -0,0 +1,73 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2alpha.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationSubtasks": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json new file mode 100644 index 0000000..8e91e42 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json @@ -0,0 +1,8 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", +
"../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" +] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..aa2c893 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..0afe940 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts new file mode 100644 index 0000000..557a575 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +//     https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {packNTest} from 'pack-n-play'; +import {readFileSync} from 'fs'; +import {describe, it} from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts new file mode 100644 index 0000000..99aac57 --- /dev/null +++ b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +//     https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage<T extends object>(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall<ResponseType>(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback<ResponseType>(response?: ResponseType, error?: Error) { + return error ?
sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall<ResponseType>(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall<ResponseType>(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2alpha.MigrationServiceClient', () => { + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, +
projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + 
projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, 
expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, 
reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { 
+ 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + 
projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + 
client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: 
protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + 
request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + 
assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = 
client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2alpha/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2alpha/webpack.config.js 
b/owl-bot-staging/v2alpha/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2alpha/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; From 77d3998ef25a7cd0fa2659d4f74dc7f628f6ac22 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Sat, 20 Aug 2022 01:15:23 +0000 Subject: [PATCH 06/13] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- owl-bot-staging/v2/.eslintignore | 7 - owl-bot-staging/v2/.eslintrc.json | 3 - owl-bot-staging/v2/.gitignore | 14 - owl-bot-staging/v2/.jsdoc.js | 55 - owl-bot-staging/v2/.mocharc.js | 33 - owl-bot-staging/v2/.prettierrc.js | 22 - owl-bot-staging/v2/README.md | 1 - owl-bot-staging/v2/linkinator.config.json | 16 - owl-bot-staging/v2/package.json | 64 - .../migration/v2/migration_entities.proto | 233 --- .../v2/migration_error_details.proto | 62 - .../migration/v2/migration_metrics.proto | 111 -- .../migration/v2/migration_service.proto | 245 ---- .../migration/v2/translation_config.proto | 257 ---- ...ation_service.create_migration_workflow.js | 67 - ...ation_service.delete_migration_workflow.js | 62 - ...migration_service.get_migration_subtask.js | 66 - ...igration_service.get_migration_workflow.js | 66 - ...gration_service.list_migration_subtasks.js | 86 -- ...ration_service.list_migration_workflows.js | 80 -- ...ration_service.start_migration_workflow.js | 62 - ...ta.google.cloud.bigquery.migration.v2.json | 335 ----- owl-bot-staging/v2/src/index.ts | 25 - owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 -- owl-bot-staging/v2/src/v2/index.ts | 19 - .../v2/src/v2/migration_service_client.ts | 1246 ---------------- .../v2/migration_service_client_config.json | 71 - .../src/v2/migration_service_proto_list.json | 7 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - owl-bot-staging/v2/system-test/install.ts | 49 - 
.../v2/test/gapic_migration_service_v2.ts | 1256 ----------------- owl-bot-staging/v2/tsconfig.json | 19 - owl-bot-staging/v2/webpack.config.js | 64 - owl-bot-staging/v2alpha/.eslintignore | 7 - owl-bot-staging/v2alpha/.eslintrc.json | 3 - owl-bot-staging/v2alpha/.gitignore | 14 - owl-bot-staging/v2alpha/.jsdoc.js | 55 - owl-bot-staging/v2alpha/.mocharc.js | 33 - owl-bot-staging/v2alpha/.prettierrc.js | 22 - owl-bot-staging/v2alpha/README.md | 1 - .../v2alpha/linkinator.config.json | 16 - owl-bot-staging/v2alpha/package.json | 64 - .../migration/v2alpha/assessment_task.proto | 49 - .../v2alpha/migration_entities.proto | 244 ---- .../v2alpha/migration_error_details.proto | 62 - .../migration/v2alpha/migration_metrics.proto | 111 -- .../migration/v2alpha/migration_service.proto | 247 ---- .../migration/v2alpha/translation_task.proto | 207 --- ...ation_service.create_migration_workflow.js | 67 - ...ation_service.delete_migration_workflow.js | 62 - ...migration_service.get_migration_subtask.js | 66 - ...igration_service.get_migration_workflow.js | 66 - ...gration_service.list_migration_subtasks.js | 86 -- ...ration_service.list_migration_workflows.js | 80 -- ...ration_service.start_migration_workflow.js | 62 - ...ogle.cloud.bigquery.migration.v2alpha.json | 335 ----- owl-bot-staging/v2alpha/src/index.ts | 25 - .../v2alpha/src/v2alpha/gapic_metadata.json | 101 -- owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 - .../src/v2alpha/migration_service_client.ts | 1246 ---------------- .../migration_service_client_config.json | 73 - .../v2alpha/migration_service_proto_list.json | 8 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - .../v2alpha/system-test/install.ts | 49 - .../test/gapic_migration_service_v2alpha.ts | 1256 ----------------- owl-bot-staging/v2alpha/tsconfig.json | 19 - owl-bot-staging/v2alpha/webpack.config.js | 64 - ...ation_service.create_migration_workflow.js | 3 + ...ation_service.delete_migration_workflow.js | 3 + ...migration_service.get_migration_subtask.js | 3 + ...igration_service.get_migration_workflow.js | 3 + ...gration_service.list_migration_subtasks.js | 3 + ...ration_service.list_migration_workflows.js | 3 + ...ration_service.start_migration_workflow.js | 3 + ...ta.google.cloud.bigquery.migration.v2.json | 14 +- ...ation_service.create_migration_workflow.js | 3 + ...ation_service.delete_migration_workflow.js | 3 + ...migration_service.get_migration_subtask.js | 3 + ...igration_service.get_migration_workflow.js | 3 + ...gration_service.list_migration_subtasks.js | 3 + ...ration_service.list_migration_workflows.js | 3 + ...ration_service.start_migration_workflow.js | 3 + ...ogle.cloud.bigquery.migration.v2alpha.json | 14 +- src/v2/migration_service_client.ts | 12 +- src/v2alpha/migration_service_client.ts | 12 +- test/gapic_migration_service_v2.ts | 160 +-- test/gapic_migration_service_v2alpha.ts | 161 +-- 89 files changed, 231 insertions(+), 9925 deletions(-) delete mode 100644 owl-bot-staging/v2/.eslintignore delete mode 100644 owl-bot-staging/v2/.eslintrc.json delete mode 100644 owl-bot-staging/v2/.gitignore delete mode 100644 owl-bot-staging/v2/.jsdoc.js delete mode 100644 owl-bot-staging/v2/.mocharc.js delete mode 100644 owl-bot-staging/v2/.prettierrc.js delete mode 100644 owl-bot-staging/v2/README.md delete mode 100644 owl-bot-staging/v2/linkinator.config.json delete mode 100644 owl-bot-staging/v2/package.json delete mode 100644 
owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json delete mode 100644 owl-bot-staging/v2/src/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/src/v2/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2/system-test/install.ts delete mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts delete mode 100644 owl-bot-staging/v2/tsconfig.json delete mode 100644 owl-bot-staging/v2/webpack.config.js delete mode 100644 owl-bot-staging/v2alpha/.eslintignore delete mode 100644 owl-bot-staging/v2alpha/.eslintrc.json delete mode 100644 owl-bot-staging/v2alpha/.gitignore delete mode 100644 owl-bot-staging/v2alpha/.jsdoc.js delete mode 100644 owl-bot-staging/v2alpha/.mocharc.js delete mode 100644 owl-bot-staging/v2alpha/.prettierrc.js delete mode 100644 owl-bot-staging/v2alpha/README.md delete mode 100644 owl-bot-staging/v2alpha/linkinator.config.json delete mode 100644 owl-bot-staging/v2alpha/package.json delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js delete mode 100644 
owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json delete mode 100644 owl-bot-staging/v2alpha/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/system-test/install.ts delete mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts delete mode 100644 owl-bot-staging/v2alpha/tsconfig.json delete mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js deleted file mode 100644 index aabe555..0000000 --- a/owl-bot-staging/v2/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json deleted file mode 100644 index 8172b89..0000000 --- a/owl-bot-staging/v2/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.2.0" - }, - "devDependencies": { - "@types/mocha": "^9.1.1", - "@types/node": "^16.11.50", - "@types/sinon": "^10.0.13", - "c8": "^7.12.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.11", - "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.2", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^8.4.0", - "typescript": "^4.7.4", - "webpack": "^4.46.0", - "webpack-cli": "^4.10.0" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto deleted file mode 100644 index 7d77bae..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2/translation_config.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. - // The ID is server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. - FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Batch SQL Translation. 
- TranslationConfigDetails translation_config_details = 14; - } - - // Output only. Immutable. The unique identifier for the migration task. The - // ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be one of the supported task types: - // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, - // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, - // Translation_Snowflake2BQ, Translation_Netezza2BQ, - // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, - // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. - string type = 2; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 5 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID - // is server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 6 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Provides details to errors and issues encountered while - // processing the subtask. Presence of error details does not mean that the - // subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. 
Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto deleted file mode 100644 index 199e2db..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. 
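Both migration_entities.proto and migration_error_details.proto use the same truncation convention: a count field (`resource_error_count`, `error_count`) may be higher than the length of the corresponding details list. A minimal sketch of surfacing that when reading a MigrationSubtask with the Node.js client; the helper name is illustrative only, and the camelCase field names follow the usual proto-to-JavaScript mapping:

// Sketch only: print the errors attached to a MigrationSubtask and flag
// truncated lists, i.e. counts that exceed what was actually returned.
function summarizeSubtaskErrors(subtask) {
  const resourceErrors = subtask.resourceErrorDetails || [];
  for (const resourceError of resourceErrors) {
    const resource =
      (resourceError.resourceInfo && resourceError.resourceInfo.resourceName) ||
      '(unknown resource)';
    for (const detail of resourceError.errorDetails || []) {
      const where = detail.location
        ? ` (line ${detail.location.line}, column ${detail.location.column})`
        : '';
      console.log(`${resource}${where}: ${detail.errorInfo.reason}`);
    }
    if (resourceError.errorCount > (resourceError.errorDetails || []).length) {
      console.log(`${resource}: error list truncated, ${resourceError.errorCount} errors in total`);
    }
  }
  if (subtask.resourceErrorCount > resourceErrors.length) {
    console.log(`${subtask.name}: resource error list truncated, ` +
      `${subtask.resourceErrorCount} resources with errors in total`);
  }
}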
- int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto deleted file mode 100644 index e52fead..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. 
For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately `+/-10^(+/-300)` and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto deleted file mode 100644 index 3c1a89e..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_entities.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Service to handle EDW migrations. 
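migration_metrics.proto above is what populates `MigrationSubtask.metrics`. A small sketch of reading it back, assuming camelCase field names; note the proto's contract that points are listed in reverse time order, so the first element is the most recent:

// Sketch only: print the most recent data point of each metric on a subtask.
function printSubtaskMetrics(subtask) {
  for (const series of subtask.metrics || []) {
    // Points come back in reverse time order, so points[0] is the latest.
    const latest = (series.points || [])[0];
    if (!latest) {
      continue;
    }
    // TypedValue is a oneof: exactly one of boolValue, int64Value, doubleValue,
    // stringValue or distributionValue is set.
    console.log(`${series.metric}:`, JSON.stringify(latest.value));
  }
}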
-service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. - rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. 
- google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. - // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. 
- // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto deleted file mode 100644 index 994140d..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationConfigProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The translation config to capture necessary settings for a translation task -// and subtask. -message TranslationConfigDetails { - // The chosen path where the source for input files will be found. - oneof source_location { - // The Cloud Storage path for a directory of files to translate in a task. - string gcs_source_path = 1; - } - - // The chosen path where the destination for output files will be found. - oneof target_location { - // The Cloud Storage path to write back the corresponding input files to. - string gcs_target_path = 2; - } - - // The dialect of the input files. - Dialect source_dialect = 3; - - // The target dialect for the engine to translate the input to. - Dialect target_dialect = 4; - - // The mapping of full SQL object names from their current state to the - // desired output. - oneof output_name_mapping { - // The mapping of objects to their desired output names in list form. - ObjectNameMappingList name_mapping_list = 5; - } - - // The default source environment values for the translation. - SourceEnv source_env = 6; -} - -// The possible dialect options for translation. -message Dialect { - // The possible dialect options that this message represents. 
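Several request messages in migration_service.proto carry a `read_mask` (`google.protobuf.FieldMask`). The generated samples further down leave `readMask` unset; the sketch below restricts the fields returned for a subtask, assuming the usual `{paths: [...]}` object form for field masks in Node.js clients. Which paths the service honors is not spelled out in the proto, so treat them as examples:

// Sketch only: fetch a migration subtask but ask for a subset of its fields.
const {MigrationServiceClient} = require('@google-cloud/migration').v2;
const migrationClient = new MigrationServiceClient();

async function getSubtaskState(name) {
  const [subtask] = await migrationClient.getMigrationSubtask({
    name, // e.g. 'projects/123/locations/us/workflows/1234/subtasks/543'
    readMask: {paths: ['name', 'state', 'processing_error']},
  });
  return subtask.state;
}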
- oneof dialect_value { - // The BigQuery dialect - BigQueryDialect bigquery_dialect = 1; - - // The HiveQL dialect - HiveQLDialect hiveql_dialect = 2; - - // The Redshift dialect - RedshiftDialect redshift_dialect = 3; - - // The Teradata dialect - TeradataDialect teradata_dialect = 4; - - // The Oracle dialect - OracleDialect oracle_dialect = 5; - - // The SparkSQL dialect - SparkSQLDialect sparksql_dialect = 6; - - // The Snowflake dialect - SnowflakeDialect snowflake_dialect = 7; - - // The Netezza dialect - NetezzaDialect netezza_dialect = 8; - - // The Azure Synapse dialect - AzureSynapseDialect azure_synapse_dialect = 9; - - // The Vertica dialect - VerticaDialect vertica_dialect = 10; - - // The SQL Server dialect - SQLServerDialect sql_server_dialect = 11; - - // The Postgresql dialect - PostgresqlDialect postgresql_dialect = 12; - - // The Presto dialect - PrestoDialect presto_dialect = 13; - - // The MySQL dialect - MySQLDialect mysql_dialect = 14; - } -} - -// The dialect definition for BigQuery. -message BigQueryDialect {} - -// The dialect definition for HiveQL. -message HiveQLDialect {} - -// The dialect definition for Redshift. -message RedshiftDialect {} - -// The dialect definition for Teradata. -message TeradataDialect { - // The sub-dialect options for Teradata. - enum Mode { - // Unspecified mode. - MODE_UNSPECIFIED = 0; - - // Teradata SQL mode. - SQL = 1; - - // BTEQ mode (which includes SQL). - BTEQ = 2; - } - - // Which Teradata sub-dialect mode the user specifies. - Mode mode = 1; -} - -// The dialect definition for Oracle. -message OracleDialect {} - -// The dialect definition for SparkSQL. -message SparkSQLDialect {} - -// The dialect definition for Snowflake. -message SnowflakeDialect {} - -// The dialect definition for Netezza. -message NetezzaDialect {} - -// The dialect definition for Azure Synapse. -message AzureSynapseDialect {} - -// The dialect definition for Vertica. -message VerticaDialect {} - -// The dialect definition for SQL Server. -message SQLServerDialect {} - -// The dialect definition for Postgresql. -message PostgresqlDialect {} - -// The dialect definition for Presto. -message PrestoDialect {} - -// The dialect definition for MySQL. -message MySQLDialect {} - -// Represents a map of name mappings using a list of key:value proto messages of -// existing name to desired output name. -message ObjectNameMappingList { - // The elements of the object name map. - repeated ObjectNameMapping name_map = 1; -} - -// Represents a key-value pair of NameMappingKey to NameMappingValue to -// represent the mapping of SQL names from the input value to desired output. -message ObjectNameMapping { - // The name of the object in source that is being mapped. - NameMappingKey source = 1; - - // The desired target name of the object that is being mapped. - NameMappingValue target = 2; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the source data warehouse. -message NameMappingKey { - // The type of the object that is being mapped. - enum Type { - // Unspecified name mapping type. - TYPE_UNSPECIFIED = 0; - - // The object being mapped is a database. - DATABASE = 1; - - // The object being mapped is a schema. - SCHEMA = 2; - - // The object being mapped is a relation. - RELATION = 3; - - // The object being mapped is an attribute. - ATTRIBUTE = 4; - - // The object being mapped is a relation alias. - RELATION_ALIAS = 5; - - // The object being mapped is a an attribute alias. 
- ATTRIBUTE_ALIAS = 6; - - // The object being mapped is a function. - FUNCTION = 7; - } - - // The type of object that is being mapped. - Type type = 1; - - // The database name (BigQuery project ID equivalent in the source data - // warehouse). - string database = 2; - - // The schema name (BigQuery dataset equivalent in the source data warehouse). - string schema = 3; - - // The relation name (BigQuery table or view equivalent in the source data - // warehouse). - string relation = 4; - - // The attribute name (BigQuery column equivalent in the source data - // warehouse). - string attribute = 5; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the target data warehouse. -message NameMappingValue { - // The database name (BigQuery project ID equivalent in the target data - // warehouse). - string database = 1; - - // The schema name (BigQuery dataset equivalent in the target data warehouse). - string schema = 2; - - // The relation name (BigQuery table or view equivalent in the target data - // warehouse). - string relation = 3; - - // The attribute name (BigQuery column equivalent in the target data - // warehouse). - string attribute = 4; -} - -// Represents the default source environment values for the translation. -message SourceEnv { - // The default database name to fully qualify SQL objects when their database - // name is missing. - string default_database = 1; - - // The schema search path. When SQL objects are missing schema name, - // translation engine will search through this list to find the value. - repeated string schema_search_path = 2; -} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js deleted file mode 100644 index 817b25b..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. 
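translation_config.proto above carries the change this commit is about: `MySQLDialect` is now one of the `source_dialect` options, and `Translation_MySQL2BQ` appears in the task type list in migration_entities.proto. The generated create sample that follows passes an empty `migrationWorkflow`; the sketch below shows what a filled-in request could look like. The bucket paths, display name, and task key are placeholders, and the exact fields a real translation job needs (for example `sourceEnv`) depend on the input SQL:

// Sketch only: create and start a MySQL -> BigQuery batch translation workflow.
const {MigrationServiceClient} = require('@google-cloud/migration').v2;
const migrationClient = new MigrationServiceClient();

async function createMySqlTranslationWorkflow(parent) {
  const migrationWorkflow = {
    displayName: 'mysql-to-bq-translation', // placeholder
    tasks: {
      // The map key is arbitrary; it only addresses the task within the workflow.
      translation: {
        type: 'Translation_MySQL2BQ',
        translationConfigDetails: {
          gcsSourcePath: 'gs://my-bucket/mysql-sql/',  // placeholder bucket
          gcsTargetPath: 'gs://my-bucket/translated/', // placeholder bucket
          sourceDialect: {mysqlDialect: {}},
          targetDialect: {bigqueryDialect: {}},
        },
      },
    },
  };

  const [workflow] = await migrationClient.createMigrationWorkflow({
    parent, // e.g. 'projects/my-project/locations/us'
    migrationWorkflow,
  });
  // Starting the workflow moves it from DRAFT to RUNNING (see
  // MigrationService.StartMigrationWorkflow above).
  await migrationClient.startMigrationWorkflow({name: workflow.name});
  return workflow.name;
}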
- */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js deleted file mode 100644 index b3907a6..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js deleted file mode 100644 index 949eb5a..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js deleted file mode 100644 index 39e79e3..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js deleted file mode 100644 index 4a63df1..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js deleted file mode 100644 index 66dc66f..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js deleted file mode 100644 index 34fb75b..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
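The two list samples above use the auto-paginating `listMigrationWorkflowsAsync` / `listMigrationSubtasksAsync` iterators. When pages need to be handled explicitly (for example to persist the page token between runs), the standard gax `autoPaginate: false` option can be passed instead; a sketch, assuming the usual three-element tuple `[resources, nextRequest, rawResponse]` that generated Node.js clients resolve to in that mode:

// Sketch only: walk workflow pages manually instead of using the async iterator.
const {MigrationServiceClient} = require('@google-cloud/migration').v2;
const migrationClient = new MigrationServiceClient();

async function listAllWorkflowNames(parent) {
  const names = [];
  let request = {parent, pageSize: 100};
  while (request) {
    const [workflows, nextRequest] = await migrationClient.listMigrationWorkflows(
      request,
      {autoPaginate: false}
    );
    for (const workflow of workflows) {
      names.push(workflow.name);
    }
    // nextRequest is null once the last page has been returned.
    request = nextRequest;
  }
  return names;
}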
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json deleted file mode 100644 index 81ec8bb..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2", - "version": "v2" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 59, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 72, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 78, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "service": { - "shortName": "MigrationService", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts deleted file mode 100644 index 35a8fd9..0000000 --- a/owl-bot-staging/v2/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2 from './v2'; -const MigrationServiceClient = v2.MigrationServiceClient; -type MigrationServiceClient = v2.MigrationServiceClient; -export {v2, MigrationServiceClient}; -export default {v2, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json deleted file mode 100644 index e0aa12f..0000000 --- a/owl-bot-staging/v2/src/v2/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2", - "libraryPackage": "@google-cloud/migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/index.ts b/owl-bot-staging/v2/src/v2/index.ts 
deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2/src/v2/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts deleted file mode 100644 index d99d7a2..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client.ts +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; - -import {Transform} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; - -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. - * @class - * @memberof v2 - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. 
- * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. 
- if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. - this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? 
- (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. 
- * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. 
- * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
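Because StartMigrationWorkflow is a no-op for a RUNNING workflow but signals an error for any state other than DRAFT or RUNNING, callers typically wrap the call in error handling. A minimal sketch, assuming the client and resource-name format documented above (the workflow name is illustrative):

const {MigrationServiceClient} = require('@google-cloud/migration').v2;
const migrationClient = new MigrationServiceClient();

async function startWorkflow(name) {
  // name format from the docs above, e.g. 'projects/123/locations/us/workflows/1234'.
  try {
    // No-op if the workflow is already RUNNING; otherwise transitions DRAFT to RUNNING.
    await migrationClient.startMigrationWorkflow({name});
  } catch (err) {
    // Any state other than DRAFT or RUNNING is signaled as an error.
    console.error(`Could not start ${name}: ${err.message}`);
  }
}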
- * @example include:samples/generated/v2/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v2/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
- * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
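The filter parameter documented above narrows the listing to the subtasks of a single task. A minimal sketch using the async iterator, assuming the client shown in the generated samples (the parent and task ID values are illustrative):

const {MigrationServiceClient} = require('@google-cloud/migration').v2;
const migrationClient = new MigrationServiceClient();

async function listSubtasksForTask() {
  const request = {
    parent: 'projects/123/locations/us/workflows/1234',
    // Filter syntax from the docs above; "ab012" is the task ID, not the name in the named map.
    filter: 'migration_task = "ab012"',
  };
  for await (const subtask of migrationClient.listMigrationSubtasksAsync(request)) {
    console.log(subtask.name);
  }
}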
- */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. 
- * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. 
The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. - */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. 
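[Note on the paginated surface removed above: the generated client exposes three flavors of the same list call: the promise-based `listMigrationSubtasks`, a `Stream` variant, and the `Async` iterable consumed with `for await...of`. A minimal usage sketch of the async-iterable form; the package name is the placeholder `@google-cloud/migration` used by the generated fixtures later in this patch and may differ in the published library.]

```ts
// Sketch only: iterate migration subtasks via the async iterable shown above.
// The import path follows the generated fixture and is an assumption here.
import {MigrationServiceClient} from '@google-cloud/migration';

async function printSubtasks() {
  const client = new MigrationServiceClient();
  const iterable = client.listMigrationSubtasksAsync({
    // Example parent from the JSDoc above.
    parent: 'projects/123/locations/us/workflows/1234',
    // Optional filter, e.g. restrict to one task as the JSDoc suggests.
    filter: 'migration_task = "ab012"',
  });
  for await (const subtask of iterable) {
    console.log(subtask.name);
  }
}
```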
- */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
- */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json deleted file mode 100644 index 5832815..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client_config.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "ListMigrationSubtasks": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json deleted file mode 100644 index 57df7ab..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" -] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js deleted file mode 100644 index aa2c893..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
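[Note on the deleted migration_service_client_config.json above: it pins per-method defaults, for example a 60 000 ms timeout and the `unavailable` retry-code set for `GetMigrationWorkflow`. Those defaults can still be overridden per call through google-gax `CallOptions`; a hedged sketch, reusing the placeholder package name assumed in the earlier example.]

```ts
// Sketch only: override the config-supplied timeout/retry behavior for one call.
// `timeout` and `maxRetries` are standard google-gax CallOptions fields.
import {MigrationServiceClient} from '@google-cloud/migration';

async function getWorkflow(name: string) {
  const client = new MigrationServiceClient();
  const [workflow] = await client.getMigrationWorkflow(
    {name},                            // e.g. 'projects/123/locations/us/workflows/1234'
    {timeout: 120_000, maxRetries: 3}  // doubles the 60 000 ms default from the JSON config
  );
  return workflow;
}
```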
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 0afe940..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts deleted file mode 100644 index 557a575..0000000 --- a/owl-bot-staging/v2/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import {packNTest} from 'pack-n-play'; -import {readFileSync} from 'fs'; -import {describe, it} from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts deleted file mode 100644 index 061c58c..0000000 --- a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2.MigrationServiceClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new 
migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - 
client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', 
async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - 
assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - 
const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = 
generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, 
expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - 
resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = 
stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - 
.getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - 
assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
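// The configuration below bundles the client entry point (./src/index.ts)
// into ./migration-service.js as a `MigrationService` library. TypeScript is
// compiled with ts-loader, `../../../package.json` is aliased to the local
// package.json, and null-loader stubs out Node-only dependencies (grpc,
// @grpc/grpc-js, retry-request, the proxy agents and gtoken), which suggests
// the bundle is intended for browser-style environments. The `node` section
// ('child_process', 'fs', 'crypto' set to 'empty') uses webpack 4 syntax.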
- -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2alpha/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2alpha/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2alpha/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js deleted file mode 100644 index aabe555..0000000 --- a/owl-bot-staging/v2alpha/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2alpha/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2alpha/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2alpha/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2alpha/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json deleted file mode 100644 index 8172b89..0000000 --- a/owl-bot-staging/v2alpha/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.2.0" - }, - "devDependencies": { - "@types/mocha": "^9.1.1", - "@types/node": "^16.11.50", - "@types/sinon": "^10.0.13", - "c8": "^7.12.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.11", - "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.2", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^8.4.0", - "typescript": "^4.7.4", - "webpack": "^4.46.0", - "webpack-cli": "^4.10.0" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto deleted file mode 100644 index 0c6ea13..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "AssessmentTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Assessment task config. -message AssessmentTaskDetails { - // Required. The Cloud Storage path for assessment input files. - string input_path = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The BigQuery dataset for output. - string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. An optional Cloud Storage path to write the query logs (which is - // then used as an input path on the translation task) - string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) - // from which the input data is extracted. - string data_source = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// Details for an assessment task orchestration result. -message AssessmentOrchestrationResultDetails { - // Optional. The version used for the output table schemas. - string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto deleted file mode 100644 index 50d4c75..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
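For context on AssessmentTaskDetails above: it carries a required Cloud Storage input path, a required output BigQuery dataset, an optional query-logs path, and a required data source identifier. A minimal, non-authoritative sketch of how that payload might look in the Node.js client's JSON form (camelCase field names assumed; every value below is a placeholder, not taken from this change):

'use strict';

// Hypothetical assessment task details; bucket, dataset and data source are
// placeholders rather than values from this change.
const assessmentTaskDetails = {
  inputPath: 'gs://example-bucket/assessment/input/*',    // Required.
  outputDataset: 'example_assessment_output',             // Required.
  querylogsPath: 'gs://example-bucket/assessment/logs/*', // Optional.
  dataSource: 'TERADATA',                                 // Required (e.g. TERADATA/REDSHIFT).
};

console.log(assessmentTaskDetails);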
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. 
- FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Assessment. - AssessmentTaskDetails assessment_task_details = 12; - - // Task configuration for Batch/Offline SQL Translation. - TranslationTaskDetails translation_task_details = 13; - } - - // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be a supported task type. - string type = 2; - - // DEPRECATED! Use one of the task_details below. - // The details of the task. The type URL must be one of the supported task - // details messages and correspond to the Task's type. - google.protobuf.Any details = 3; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; - - // Output only. Additional information about the orchestration. - MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. 
Provides details to errors and issues encountered while processing the - // subtask. Presence of error details does not mean that the subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} - -// Additional information from the orchestrator when it is done with the -// task orchestration. -message MigrationTaskOrchestrationResult { - // Details specific to the task type. - oneof details { - // Details specific to assessment task types. - AssessmentOrchestrationResultDetails assessment_details = 1; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto deleted file mode 100644 index 89dac5e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. 
The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. - int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto deleted file mode 100644 index ce60dd2..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. 
If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately +/-9.2x10^18. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately +/-10^(+/-300) and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto deleted file mode 100644 index 9a184a1..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto +++ /dev/null @@ -1,247 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
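The migration_entities messages above and the MigrationService defined below fit together as: create a workflow, then start it to move it from DRAFT to RUNNING (per the StartMigrationWorkflow comment). A hedged sketch using the staged v2alpha client surface; the project, paths and the task type string are placeholders:

'use strict';

const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;

async function createAndStartWorkflow() {
  const client = new MigrationServiceClient();

  const [workflow] = await client.createMigrationWorkflow({
    parent: 'projects/example-project/locations/us', // placeholder project/location
    migrationWorkflow: {
      displayName: 'example assessment workflow',
      // `tasks` is a map keyed by an arbitrary task name
      // (`map<string, MigrationTask>` in the upstream proto).
      tasks: {
        assessment: {
          type: 'Assessment', // illustrative task type string
          assessmentTaskDetails: {
            inputPath: 'gs://example-bucket/assessment/input/*',
            outputDataset: 'example_assessment_output',
            dataSource: 'TERADATA',
          },
        },
      },
    },
  });
  console.log('Created workflow:', workflow.name);

  // StartMigrationWorkflow transitions the workflow from DRAFT to RUNNING.
  await client.startMigrationWorkflow({name: workflow.name});
}

createAndStartWorkflow().catch(err => {
  console.error(err.message);
  process.exitCode = 1;
});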
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Service to handle EDW migrations. -service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2alpha/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. 
- rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. 
- // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto deleted file mode 100644 index bf4b27e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
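ListMigrationWorkflows and ListMigrationSubtasks above are paginated RPCs; the generated client also exposes async-iterable helpers (listMigrationWorkflowsAsync, listMigrationSubtasksAsync), the same surface the v2 unit tests earlier in this diff stub and exercise. A non-authoritative usage sketch for the v2alpha client; project, workflow and task IDs are placeholders:

'use strict';

const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha;

async function listWorkflowsAndSubtasks() {
  const client = new MigrationServiceClient();
  const location = 'projects/example-project/locations/us'; // placeholder

  // Workflows for a location; page_token handling is done by the iterator.
  for await (const workflow of client.listMigrationWorkflowsAsync({parent: location})) {
    console.log(workflow.name, workflow.state);
  }

  // Subtasks for one workflow, filtered to a single task ID. The filter form
  // mirrors the `migration_task = "ab012"` example in the request comments.
  const parent = `${location}/workflows/1234`; // placeholder workflow ID
  for await (const subtask of client.listMigrationSubtasksAsync({
    parent,
    filter: 'migration_task = "ab012"',
  })) {
    console.log(subtask.name, subtask.state);
  }
}

listWorkflowsAndSubtasks().catch(err => {
  console.error(err.message);
  process.exitCode = 1;
});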
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Mapping between an input and output file to be translated in a subtask. -message TranslationFileMapping { - // The Cloud Storage path for a file to translation in a subtask. - string input_path = 1; - - // The Cloud Storage path to write back the corresponding input file to. - string output_path = 2; -} - -// The translation task config to capture necessary settings for a translation -// task and subtask. -message TranslationTaskDetails { - // The file encoding types. - enum FileEncoding { - // File encoding setting is not specified. - FILE_ENCODING_UNSPECIFIED = 0; - - // File encoding is UTF_8. - UTF_8 = 1; - - // File encoding is ISO_8859_1. - ISO_8859_1 = 2; - - // File encoding is US_ASCII. - US_ASCII = 3; - - // File encoding is UTF_16. - UTF_16 = 4; - - // File encoding is UTF_16LE. - UTF_16LE = 5; - - // File encoding is UTF_16BE. - UTF_16BE = 6; - } - - // The special token data type. - enum TokenType { - // Token type is not specified. - TOKEN_TYPE_UNSPECIFIED = 0; - - // Token type as string. - STRING = 1; - - // Token type as integer. - INT64 = 2; - - // Token type as numeric. - NUMERIC = 3; - - // Token type as boolean. - BOOL = 4; - - // Token type as float. - FLOAT64 = 5; - - // Token type as date. - DATE = 6; - - // Token type as timestamp. - TIMESTAMP = 7; - } - - // The language specific settings for the translation task. - oneof language_options { - // The Teradata SQL specific settings for the translation task. - TeradataOptions teradata_options = 10; - - // The BTEQ specific settings for the translation task. - BteqOptions bteq_options = 11; - } - - // The Cloud Storage path for translation input files. - string input_path = 1; - - // The Cloud Storage path for translation output files. - string output_path = 2; - - // Cloud Storage files to be processed for translation. - repeated TranslationFileMapping file_paths = 12; - - // The Cloud Storage path to DDL files as table schema to assist semantic - // translation. - string schema_path = 3; - - // The file encoding type. - FileEncoding file_encoding = 4; - - // The settings for SQL identifiers. - IdentifierSettings identifier_settings = 5; - - // The map capturing special tokens to be replaced during translation. The key - // is special token in string. The value is the token data type. This is used - // to translate SQL query template which contains special token as place - // holder. The special token makes a query invalid to parse. This map will be - // applied to annotate those special token with types to let parser understand - // how to parse them into proper structure with type information. - map special_token_map = 6; - - // The filter applied to translation details. - Filter filter = 7; - - // Specifies the exact name of the bigquery table ("dataset.table") to be used - // for surfacing raw translation errors. If the table does not exist, we will - // create it. If it already exists and the schema is the same, we will re-use. - // If the table exists and the schema is different, we will throw an error. 
- string translation_exception_table = 13; -} - -// The filter applied to fields of translation details. -message Filter { - // The list of prefixes used to exclude processing for input files. - repeated string input_file_exclusion_prefixes = 1; -} - -// Settings related to SQL identifiers. -message IdentifierSettings { - // The identifier case type. - enum IdentifierCase { - // The identifier case is not specified. - IDENTIFIER_CASE_UNSPECIFIED = 0; - - // Identifiers' cases will be kept as the original cases. - ORIGINAL = 1; - - // Identifiers will be in upper cases. - UPPER = 2; - - // Identifiers will be in lower cases. - LOWER = 3; - } - - // The SQL identifier rewrite mode. - enum IdentifierRewriteMode { - // SQL Identifier rewrite mode is unspecified. - IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; - - // SQL identifiers won't be rewrite. - NONE = 1; - - // All SQL identifiers will be rewrite. - REWRITE_ALL = 2; - } - - // The setting to control output queries' identifier case. - IdentifierCase output_identifier_case = 1; - - // Specifies the rewrite mode for SQL identifiers. - IdentifierRewriteMode identifier_rewrite_mode = 2; -} - -// Teradata SQL specific translation task related settings. -message TeradataOptions { - -} - -// BTEQ translation task related settings. -message BteqOptions { - // Specifies the project and dataset in BigQuery that will be used for - // external table creation during the translation. - DatasetReference project_dataset = 1; - - // The Cloud Storage location to be used as the default path for files that - // are not otherwise specified in the file replacement map. - string default_path_uri = 2; - - // Maps the local paths that are used in BTEQ scripts (the keys) to the paths - // in Cloud Storage that should be used in their stead in the translation (the - // value). - map file_replacement_map = 3; -} - -// Reference to a BigQuery dataset. -message DatasetReference { - // A unique ID for this dataset, without the project name. The ID - // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). - // The maximum length is 1,024 characters. - string dataset_id = 1; - - // The ID of the project containing this dataset. - string project_id = 2; -} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js deleted file mode 100644 index 3c13323..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. - */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js deleted file mode 100644 index 1637924..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js deleted file mode 100644 index be42b52..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js deleted file mode 100644 index f3ef8c0..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js deleted file mode 100644 index 4b1e2a7..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js deleted file mode 100644 index 0b71d06..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js deleted file mode 100644 index bcdcbbf..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json deleted file mode 100644 index c5336c2..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2alpha", - "version": "v2alpha" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 59, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 72, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 78, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "service": { - 
"shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts deleted file mode 100644 index 288e629..0000000 --- a/owl-bot-staging/v2alpha/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2alpha from './v2alpha'; -const MigrationServiceClient = v2alpha.MigrationServiceClient; -type MigrationServiceClient = v2alpha.MigrationServiceClient; -export {v2alpha, MigrationServiceClient}; -export default {v2alpha, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json deleted file mode 100644 index f751ba9..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2alpha", - "libraryPackage": "@google-cloud/migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - 
"listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts deleted file mode 100644 index 0fbbe62..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; - -import {Transform} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2alpha/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; - -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2alpha - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
- if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. - this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. 
- * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2alpha.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. 
- * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. 
The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. 
- * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = 
options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. 
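A sketch of consuming the stream variant above as an object stream, with the same 'data'/'error'/'end' handling the unit tests in this patch use; the parent value is the example from the JSDoc and the import path is an assumption.

import {MigrationServiceClient} from '@google-cloud/migration';

function streamWorkflows() {
  const client = new MigrationServiceClient();
  client
    .listMigrationWorkflowsStream({parent: 'projects/123/locations/us'})
    .on('data', (workflow: {name?: string | null}) => console.log(workflow.name))
    .on('error', (err: Error) => console.error(err))
    .on('end', () => console.log('all pages consumed'));
}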
- * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. 
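The async-iteration form that the JSDoc recommends, sketched with the same example parent; the pageSize value and import path are assumptions.

import {MigrationServiceClient} from '@google-cloud/migration';

async function listWorkflows() {
  const client = new MigrationServiceClient();
  const iterable = client.listMigrationWorkflowsAsync({
    parent: 'projects/123/locations/us',
    pageSize: 50, // hypothetical; the service may return fewer per page
  });
  // Pages are fetched lazily; breaking out of the loop stops further API calls.
  for await (const workflow of iterable) {
    console.log(workflow.name);
  }
}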
- * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. 
- * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. 
`migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. 
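A sketch of filtering subtasks by task ID with listMigrationSubtasksAsync, reusing the filter expression and parent format from the JSDoc above; the workflow ID and import path are assumptions.

import {MigrationServiceClient} from '@google-cloud/migration';

async function listSubtasksForTask() {
  const client = new MigrationServiceClient();
  const iterable = client.listMigrationSubtasksAsync({
    parent: 'projects/123/locations/us/workflows/1234',
    // Filter syntax from the JSDoc: selects subtasks of the task with ID "ab012".
    filter: 'migration_task = "ab012"',
  });
  for await (const subtask of iterable) {
    console.log(subtask.name);
  }
}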
- */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. 
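The resource-name helpers above both build and parse fully qualified names; a short sketch with hypothetical IDs and the assumed package import.

import {MigrationServiceClient} from '@google-cloud/migration';

const client = new MigrationServiceClient();
// Hypothetical IDs; renders projects/my-project/locations/us/workflows/1234/subtasks/543.
const subtaskName = client.migrationSubtaskPath('my-project', 'us', '1234', '543');
// The match* helpers parse individual segments back out of a resource name.
console.log(client.matchWorkflowFromMigrationSubtaskName(subtaskName)); // '1234'
console.log(client.matchSubtaskFromMigrationSubtaskName(subtaskName));  // '543'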
- */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. - */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json deleted file mode 100644 index 2184b83..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2alpha.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationSubtasks": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json deleted file mode 100644 index 8e91e42..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", - 
"../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" -] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js deleted file mode 100644 index aa2c893..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 0afe940..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts deleted file mode 100644 index 557a575..0000000 --- a/owl-bot-staging/v2alpha/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {packNTest} from 'pack-n-play'; -import {readFileSync} from 'fs'; -import {describe, it} from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts deleted file mode 100644 index 99aac57..0000000 --- a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? 
sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2alpha.MigrationServiceClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - 
projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - 
projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, 
expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, 
reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { 
- 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - 
projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - 
client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: 
protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - 
request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - 
assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - 
credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = 
client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2alpha/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2alpha/webpack.config.js 
b/owl-bot-staging/v2alpha/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2alpha/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/samples/generated/v2/migration_service.create_migration_workflow.js b/samples/generated/v2/migration_service.create_migration_workflow.js index 9aed895..817b25b 100644 --- a/samples/generated/v2/migration_service.create_migration_workflow.js +++ b/samples/generated/v2/migration_service.create_migration_workflow.js @@ -23,6 +23,9 @@ function main(parent, migrationWorkflow) { // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2/migration_service.delete_migration_workflow.js b/samples/generated/v2/migration_service.delete_migration_workflow.js index aeda293..b3907a6 100644 --- a/samples/generated/v2/migration_service.delete_migration_workflow.js +++ b/samples/generated/v2/migration_service.delete_migration_workflow.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. 
*/ /** diff --git a/samples/generated/v2/migration_service.get_migration_subtask.js b/samples/generated/v2/migration_service.get_migration_subtask.js index df6f7d2..949eb5a 100644 --- a/samples/generated/v2/migration_service.get_migration_subtask.js +++ b/samples/generated/v2/migration_service.get_migration_subtask.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2/migration_service.get_migration_workflow.js b/samples/generated/v2/migration_service.get_migration_workflow.js index 72f7565..39e79e3 100644 --- a/samples/generated/v2/migration_service.get_migration_workflow.js +++ b/samples/generated/v2/migration_service.get_migration_workflow.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2/migration_service.list_migration_subtasks.js b/samples/generated/v2/migration_service.list_migration_subtasks.js index 95e5833..4a63df1 100644 --- a/samples/generated/v2/migration_service.list_migration_subtasks.js +++ b/samples/generated/v2/migration_service.list_migration_subtasks.js @@ -23,6 +23,9 @@ function main(parent) { // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2/migration_service.list_migration_workflows.js b/samples/generated/v2/migration_service.list_migration_workflows.js index 63402d4..66dc66f 100644 --- a/samples/generated/v2/migration_service.list_migration_workflows.js +++ b/samples/generated/v2/migration_service.list_migration_workflows.js @@ -23,6 +23,9 @@ function main(parent) { // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2/migration_service.start_migration_workflow.js b/samples/generated/v2/migration_service.start_migration_workflow.js index 76c614c..34fb75b 100644 --- a/samples/generated/v2/migration_service.start_migration_workflow.js +++ b/samples/generated/v2/migration_service.start_migration_workflow.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. 
+ * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json index 9827112..259f84f 100644 --- a/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json +++ b/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json @@ -22,7 +22,7 @@ "segments": [ { "start": 25, - "end": 56, + "end": 59, "type": "FULL" } ], @@ -66,7 +66,7 @@ "segments": [ { "start": 25, - "end": 55, + "end": 58, "type": "FULL" } ], @@ -110,7 +110,7 @@ "segments": [ { "start": 25, - "end": 69, + "end": 72, "type": "FULL" } ], @@ -162,7 +162,7 @@ "segments": [ { "start": 25, - "end": 51, + "end": 54, "type": "FULL" } ], @@ -202,7 +202,7 @@ "segments": [ { "start": 25, - "end": 51, + "end": 54, "type": "FULL" } ], @@ -242,7 +242,7 @@ "segments": [ { "start": 25, - "end": 55, + "end": 58, "type": "FULL" } ], @@ -286,7 +286,7 @@ "segments": [ { "start": 25, - "end": 75, + "end": 78, "type": "FULL" } ], diff --git a/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/samples/generated/v2alpha/migration_service.create_migration_workflow.js index 4004520..3c13323 100644 --- a/samples/generated/v2alpha/migration_service.create_migration_workflow.js +++ b/samples/generated/v2alpha/migration_service.create_migration_workflow.js @@ -23,6 +23,9 @@ function main(parent, migrationWorkflow) { // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/samples/generated/v2alpha/migration_service.delete_migration_workflow.js index 04adf0d..1637924 100644 --- a/samples/generated/v2alpha/migration_service.delete_migration_workflow.js +++ b/samples/generated/v2alpha/migration_service.delete_migration_workflow.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/samples/generated/v2alpha/migration_service.get_migration_subtask.js index 8bf28e2..be42b52 100644 --- a/samples/generated/v2alpha/migration_service.get_migration_subtask.js +++ b/samples/generated/v2alpha/migration_service.get_migration_subtask.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. 
*/ /** diff --git a/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/samples/generated/v2alpha/migration_service.get_migration_workflow.js index bbc9e68..f3ef8c0 100644 --- a/samples/generated/v2alpha/migration_service.get_migration_workflow.js +++ b/samples/generated/v2alpha/migration_service.get_migration_workflow.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/samples/generated/v2alpha/migration_service.list_migration_subtasks.js index 61a50a0..4b1e2a7 100644 --- a/samples/generated/v2alpha/migration_service.list_migration_subtasks.js +++ b/samples/generated/v2alpha/migration_service.list_migration_subtasks.js @@ -23,6 +23,9 @@ function main(parent) { // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/samples/generated/v2alpha/migration_service.list_migration_workflows.js index 41d06f8..0b71d06 100644 --- a/samples/generated/v2alpha/migration_service.list_migration_workflows.js +++ b/samples/generated/v2alpha/migration_service.list_migration_workflows.js @@ -23,6 +23,9 @@ function main(parent) { // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. */ /** diff --git a/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/samples/generated/v2alpha/migration_service.start_migration_workflow.js index 236300c..bcdcbbf 100644 --- a/samples/generated/v2alpha/migration_service.start_migration_workflow.js +++ b/samples/generated/v2alpha/migration_service.start_migration_workflow.js @@ -23,6 +23,9 @@ function main(name) { // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. * TODO(developer): Uncomment these variables before running the sample. 
*/ /** diff --git a/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json index 5b87250..3929d87 100644 --- a/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json +++ b/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json @@ -22,7 +22,7 @@ "segments": [ { "start": 25, - "end": 56, + "end": 59, "type": "FULL" } ], @@ -66,7 +66,7 @@ "segments": [ { "start": 25, - "end": 55, + "end": 58, "type": "FULL" } ], @@ -110,7 +110,7 @@ "segments": [ { "start": 25, - "end": 69, + "end": 72, "type": "FULL" } ], @@ -162,7 +162,7 @@ "segments": [ { "start": 25, - "end": 51, + "end": 54, "type": "FULL" } ], @@ -202,7 +202,7 @@ "segments": [ { "start": 25, - "end": 51, + "end": 54, "type": "FULL" } ], @@ -242,7 +242,7 @@ "segments": [ { "start": 25, - "end": 55, + "end": 58, "type": "FULL" } ], @@ -286,7 +286,7 @@ "segments": [ { "start": 25, - "end": 75, + "end": 78, "type": "FULL" } ], diff --git a/src/v2/migration_service_client.ts b/src/v2/migration_service_client.ts index 7827dd6..add606f 100644 --- a/src/v2/migration_service_client.ts +++ b/src/v2/migration_service_client.ts @@ -28,7 +28,6 @@ import { } from 'google-gax'; import {Transform} from 'stream'; -import {RequestType} from 'google-gax/build/src/apitypes'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); /** @@ -268,7 +267,8 @@ export class MigrationServiceClient { const apiCall = this._gaxModule.createApiCall( callPromise, this._defaults[methodName], - descriptor + descriptor, + this._opts.fallback ); this.innerApiCalls[methodName] = apiCall; @@ -999,7 +999,7 @@ export class MigrationServiceClient { const callSettings = defaultCallSettings.merge(options); this.initialize(); return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, + this.innerApiCalls.listMigrationWorkflows as GaxCall, request, callSettings ); @@ -1055,7 +1055,7 @@ export class MigrationServiceClient { this.initialize(); return this.descriptors.page.listMigrationWorkflows.asyncIterate( this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as unknown as RequestType, + request as {}, callSettings ) as AsyncIterable; } @@ -1220,7 +1220,7 @@ export class MigrationServiceClient { const callSettings = defaultCallSettings.merge(options); this.initialize(); return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, + this.innerApiCalls.listMigrationSubtasks as GaxCall, request, callSettings ); @@ -1280,7 +1280,7 @@ export class MigrationServiceClient { this.initialize(); return this.descriptors.page.listMigrationSubtasks.asyncIterate( this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as unknown as RequestType, + request as {}, callSettings ) as AsyncIterable; } diff --git a/src/v2alpha/migration_service_client.ts b/src/v2alpha/migration_service_client.ts index a201987..c6b8fca 100644 --- a/src/v2alpha/migration_service_client.ts +++ b/src/v2alpha/migration_service_client.ts @@ -28,7 +28,6 @@ import { } from 'google-gax'; import {Transform} from 'stream'; -import {RequestType} from 'google-gax/build/src/apitypes'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); /** @@ -268,7 +267,8 @@ export class MigrationServiceClient { 
const apiCall = this._gaxModule.createApiCall( callPromise, this._defaults[methodName], - descriptor + descriptor, + this._opts.fallback ); this.innerApiCalls[methodName] = apiCall; @@ -999,7 +999,7 @@ export class MigrationServiceClient { const callSettings = defaultCallSettings.merge(options); this.initialize(); return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as gax.GaxCall, + this.innerApiCalls.listMigrationWorkflows as GaxCall, request, callSettings ); @@ -1055,7 +1055,7 @@ export class MigrationServiceClient { this.initialize(); return this.descriptors.page.listMigrationWorkflows.asyncIterate( this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as unknown as RequestType, + request as {}, callSettings ) as AsyncIterable; } @@ -1220,7 +1220,7 @@ export class MigrationServiceClient { const callSettings = defaultCallSettings.merge(options); this.initialize(); return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as gax.GaxCall, + this.innerApiCalls.listMigrationSubtasks as GaxCall, request, callSettings ); @@ -1280,7 +1280,7 @@ export class MigrationServiceClient { this.initialize(); return this.descriptors.page.listMigrationSubtasks.asyncIterate( this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as unknown as RequestType, + request as {}, callSettings ) as AsyncIterable; } diff --git a/test/gapic_migration_service_v2.ts b/test/gapic_migration_service_v2.ts index f29b4b0..6b034cf 100644 --- a/test/gapic_migration_service_v2.ts +++ b/test/gapic_migration_service_v2.ts @@ -113,101 +113,103 @@ function stubAsyncIterationCall( } describe('v2.MigrationServiceClient', () => { - it('has servicePath', () => { - const servicePath = - migrationserviceModule.v2.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = - migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient(); - assert(client); - }); + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = + migrationserviceModule.v2.MigrationServiceClient.servicePath; + assert(servicePath); + }); - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - fallback: true, + it('has apiEndpoint', () => { + const apiEndpoint = + migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); }); - assert(client); - }); - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has port', () => { + const port = migrationserviceModule.v2.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', 
private_key: 'bogus'}, - projectId: 'bogus', + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient(); + assert(client); }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + fallback: true, + }); + assert(client); }); - }); - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); }); - }); - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); }); - client.auth.getProjectId = sinon - .stub() - .callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error | null, projectId?: string | null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); }); describe('createMigrationWorkflow', () => { diff --git a/test/gapic_migration_service_v2alpha.ts b/test/gapic_migration_service_v2alpha.ts index 8c621eb..0fdc0f1 100644 --- a/test/gapic_migration_service_v2alpha.ts +++ b/test/gapic_migration_service_v2alpha.ts @@ -113,101 +113,104 @@ function stubAsyncIterationCall( } describe('v2alpha.MigrationServiceClient', () => { - it('has servicePath', () => { - const servicePath = - migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = - migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); - assert(client); - }); + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = + migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; + assert(servicePath); + }); - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - fallback: true, + it('has apiEndpoint', () => { + const apiEndpoint = + migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); }); - assert(client); - }); - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has port', () => { + const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('should create a client with no option', () => { + const client = + new migrationserviceModule.v2alpha.MigrationServiceClient(); + assert(client); }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + fallback: true, + }); + assert(client); }); - }); - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has initialize method and supports deferred initialization', async 
() => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); }); - }); - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); }); - client.auth.getProjectId = sinon - .stub() - .callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error | null, projectId?: string | null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); }); describe('createMigrationWorkflow', () => { From f7c34b0e78e43156abe1420f2feee2be92990ab8 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 24 Aug 2022 16:57:01 +0000 Subject: [PATCH 07/13] 
=?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- protos/protos.d.ts | 21 +++++++ protos/protos.js | 143 ++++++++++++++++++++++++++++++++------------- 2 files changed, 122 insertions(+), 42 deletions(-) diff --git a/protos/protos.d.ts b/protos/protos.d.ts index f1537c9..0b77705 100644 --- a/protos/protos.d.ts +++ b/protos/protos.d.ts @@ -2655,6 +2655,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for PostgresqlDialect + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a PrestoDialect. */ @@ -2739,6 +2746,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for PrestoDialect + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of a MySQLDialect. */ @@ -2823,6 +2837,13 @@ export namespace google { * @returns JSON object */ public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for MySQLDialect + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; } /** Properties of an ObjectNameMappingList. */ diff --git a/protos/protos.js b/protos/protos.js index 3fe22cb..070373b 100644 --- a/protos/protos.js +++ b/protos/protos.js @@ -4026,48 +4026,62 @@ while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { - case 1: - message.bigqueryDialect = $root.google.cloud.bigquery.migration.v2.BigQueryDialect.decode(reader, reader.uint32()); - break; - case 2: - message.hiveqlDialect = $root.google.cloud.bigquery.migration.v2.HiveQLDialect.decode(reader, reader.uint32()); - break; - case 3: - message.redshiftDialect = $root.google.cloud.bigquery.migration.v2.RedshiftDialect.decode(reader, reader.uint32()); - break; - case 4: - message.teradataDialect = $root.google.cloud.bigquery.migration.v2.TeradataDialect.decode(reader, reader.uint32()); - break; - case 5: - message.oracleDialect = $root.google.cloud.bigquery.migration.v2.OracleDialect.decode(reader, reader.uint32()); - break; - case 6: - message.sparksqlDialect = $root.google.cloud.bigquery.migration.v2.SparkSQLDialect.decode(reader, reader.uint32()); - break; - case 7: - message.snowflakeDialect = $root.google.cloud.bigquery.migration.v2.SnowflakeDialect.decode(reader, reader.uint32()); - break; - case 8: - message.netezzaDialect = $root.google.cloud.bigquery.migration.v2.NetezzaDialect.decode(reader, reader.uint32()); - break; - case 9: - message.azureSynapseDialect = $root.google.cloud.bigquery.migration.v2.AzureSynapseDialect.decode(reader, reader.uint32()); - break; - case 10: - message.verticaDialect = $root.google.cloud.bigquery.migration.v2.VerticaDialect.decode(reader, reader.uint32()); - break; - case 11: - message.sqlServerDialect = $root.google.cloud.bigquery.migration.v2.SQLServerDialect.decode(reader, reader.uint32()); - break; - case 12: - message.postgresqlDialect = 
$root.google.cloud.bigquery.migration.v2.PostgresqlDialect.decode(reader, reader.uint32()); - break; - case 13: - message.prestoDialect = $root.google.cloud.bigquery.migration.v2.PrestoDialect.decode(reader, reader.uint32()); - break; - case 14: - message.mysqlDialect = $root.google.cloud.bigquery.migration.v2.MySQLDialect.decode(reader, reader.uint32()); - break; + case 1: { + message.bigqueryDialect = $root.google.cloud.bigquery.migration.v2.BigQueryDialect.decode(reader, reader.uint32()); + break; + } + case 2: { + message.hiveqlDialect = $root.google.cloud.bigquery.migration.v2.HiveQLDialect.decode(reader, reader.uint32()); + break; + } + case 3: { + message.redshiftDialect = $root.google.cloud.bigquery.migration.v2.RedshiftDialect.decode(reader, reader.uint32()); + break; + } + case 4: { + message.teradataDialect = $root.google.cloud.bigquery.migration.v2.TeradataDialect.decode(reader, reader.uint32()); + break; + } + case 5: { + message.oracleDialect = $root.google.cloud.bigquery.migration.v2.OracleDialect.decode(reader, reader.uint32()); + break; + } + case 6: { + message.sparksqlDialect = $root.google.cloud.bigquery.migration.v2.SparkSQLDialect.decode(reader, reader.uint32()); + break; + } + case 7: { + message.snowflakeDialect = $root.google.cloud.bigquery.migration.v2.SnowflakeDialect.decode(reader, reader.uint32()); + break; + } + case 8: { + message.netezzaDialect = $root.google.cloud.bigquery.migration.v2.NetezzaDialect.decode(reader, reader.uint32()); + break; + } + case 9: { + message.azureSynapseDialect = $root.google.cloud.bigquery.migration.v2.AzureSynapseDialect.decode(reader, reader.uint32()); + break; + } + case 10: { + message.verticaDialect = $root.google.cloud.bigquery.migration.v2.VerticaDialect.decode(reader, reader.uint32()); + break; + } + case 11: { + message.sqlServerDialect = $root.google.cloud.bigquery.migration.v2.SQLServerDialect.decode(reader, reader.uint32()); + break; + } + case 12: { + message.postgresqlDialect = $root.google.cloud.bigquery.migration.v2.PostgresqlDialect.decode(reader, reader.uint32()); + break; + } + case 13: { + message.prestoDialect = $root.google.cloud.bigquery.migration.v2.PrestoDialect.decode(reader, reader.uint32()); + break; + } + case 14: { + message.mysqlDialect = $root.google.cloud.bigquery.migration.v2.MySQLDialect.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -6589,6 +6603,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for PostgresqlDialect + * @function getTypeUrl + * @memberof google.cloud.bigquery.migration.v2.PostgresqlDialect + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + PostgresqlDialect.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.migration.v2.PostgresqlDialect"; + }; + return PostgresqlDialect; })(); @@ -6749,6 +6778,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for PrestoDialect + * @function getTypeUrl + * @memberof google.cloud.bigquery.migration.v2.PrestoDialect + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + PrestoDialect.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if 
(typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.migration.v2.PrestoDialect"; + }; + return PrestoDialect; })(); @@ -6909,6 +6953,21 @@ return this.constructor.toObject(this, $protobuf.util.toJSONOptions); }; + /** + * Gets the default type url for MySQLDialect + * @function getTypeUrl + * @memberof google.cloud.bigquery.migration.v2.MySQLDialect + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + MySQLDialect.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.bigquery.migration.v2.MySQLDialect"; + }; + return MySQLDialect; })(); From 54e23376fa84c3277a3bbc9b3d912fcb1cfa27b2 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 24 Aug 2022 21:06:24 +0000 Subject: [PATCH 08/13] build: fix artifact name for bigquery migration PiperOrigin-RevId: 469790470 Source-Link: https://github.com/googleapis/googleapis/commit/842682e6708411676b9ee94a63fc366d4406bb81 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ff37d3431a178aa1601fe688767f7a3306dde149 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZmYzN2QzNDMxYTE3OGFhMTYwMWZlNjg4NzY3ZjdhMzMwNmRkZTE0OSJ9 --- owl-bot-staging/v2/.eslintignore | 7 + owl-bot-staging/v2/.eslintrc.json | 3 + owl-bot-staging/v2/.gitignore | 14 + owl-bot-staging/v2/.jsdoc.js | 55 + owl-bot-staging/v2/.mocharc.js | 33 + owl-bot-staging/v2/.prettierrc.js | 22 + owl-bot-staging/v2/README.md | 1 + owl-bot-staging/v2/linkinator.config.json | 16 + owl-bot-staging/v2/package.json | 64 + .../migration/v2/migration_entities.proto | 233 +++ .../v2/migration_error_details.proto | 62 + .../migration/v2/migration_metrics.proto | 111 ++ .../migration/v2/migration_service.proto | 245 ++++ .../migration/v2/translation_config.proto | 257 ++++ ...ation_service.create_migration_workflow.js | 67 + ...ation_service.delete_migration_workflow.js | 62 + ...migration_service.get_migration_subtask.js | 66 + ...igration_service.get_migration_workflow.js | 66 + ...gration_service.list_migration_subtasks.js | 86 ++ ...ration_service.list_migration_workflows.js | 80 ++ ...ration_service.start_migration_workflow.js | 62 + ...ta.google.cloud.bigquery.migration.v2.json | 335 +++++ owl-bot-staging/v2/src/index.ts | 25 + owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 ++ owl-bot-staging/v2/src/v2/index.ts | 19 + .../v2/src/v2/migration_service_client.ts | 1246 ++++++++++++++++ .../v2/migration_service_client_config.json | 71 + .../src/v2/migration_service_proto_list.json | 7 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + owl-bot-staging/v2/system-test/install.ts | 49 + .../v2/test/gapic_migration_service_v2.ts | 1256 +++++++++++++++++ owl-bot-staging/v2/tsconfig.json | 19 + owl-bot-staging/v2/webpack.config.js | 64 + owl-bot-staging/v2alpha/.eslintignore | 7 + owl-bot-staging/v2alpha/.eslintrc.json | 3 + owl-bot-staging/v2alpha/.gitignore | 14 + owl-bot-staging/v2alpha/.jsdoc.js | 55 + owl-bot-staging/v2alpha/.mocharc.js | 33 + owl-bot-staging/v2alpha/.prettierrc.js | 22 + owl-bot-staging/v2alpha/README.md | 1 + .../v2alpha/linkinator.config.json | 16 + owl-bot-staging/v2alpha/package.json | 64 + .../migration/v2alpha/assessment_task.proto | 49 + .../v2alpha/migration_entities.proto | 244 ++++ .../v2alpha/migration_error_details.proto 
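The getTypeUrl helpers added in the hunks above make it easier to build google.protobuf.Any type URLs for the new dialect messages. A minimal sketch of how they might be called from the compiled protobufjs bundle; the require path is an assumption about the published package layout (the bundle lives at protos/protos.js in the repo and is copied into build/ on compile), and the custom prefix is illustrative only.

// Assumption: the compiled protobufjs bundle is reachable at this path in the
// published package; adjust the path to your local build if it differs.
const {google} = require('@google-cloud/bigquery-migration/build/protos/protos');

const {MySQLDialect} = google.cloud.bigquery.migration.v2;

// With no argument the default prefix "type.googleapis.com" is used.
console.log(MySQLDialect.getTypeUrl());
// "type.googleapis.com/google.cloud.bigquery.migration.v2.MySQLDialect"

// A custom prefix can be supplied, e.g. for a private type server.
console.log(MySQLDialect.getTypeUrl('type.example.com'));
// "type.example.com/google.cloud.bigquery.migration.v2.MySQLDialect"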
| 62 + .../migration/v2alpha/migration_metrics.proto | 111 ++ .../migration/v2alpha/migration_service.proto | 247 ++++ .../migration/v2alpha/translation_task.proto | 207 +++ ...ation_service.create_migration_workflow.js | 67 + ...ation_service.delete_migration_workflow.js | 62 + ...migration_service.get_migration_subtask.js | 66 + ...igration_service.get_migration_workflow.js | 66 + ...gration_service.list_migration_subtasks.js | 86 ++ ...ration_service.list_migration_workflows.js | 80 ++ ...ration_service.start_migration_workflow.js | 62 + ...ogle.cloud.bigquery.migration.v2alpha.json | 335 +++++ owl-bot-staging/v2alpha/src/index.ts | 25 + .../v2alpha/src/v2alpha/gapic_metadata.json | 101 ++ owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 + .../src/v2alpha/migration_service_client.ts | 1246 ++++++++++++++++ .../migration_service_client_config.json | 73 + .../v2alpha/migration_service_proto_list.json | 8 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + .../v2alpha/system-test/install.ts | 49 + .../test/gapic_migration_service_v2alpha.ts | 1256 +++++++++++++++++ owl-bot-staging/v2alpha/tsconfig.json | 19 + owl-bot-staging/v2alpha/webpack.config.js | 64 + 69 files changed, 9741 insertions(+) create mode 100644 owl-bot-staging/v2/.eslintignore create mode 100644 owl-bot-staging/v2/.eslintrc.json create mode 100644 owl-bot-staging/v2/.gitignore create mode 100644 owl-bot-staging/v2/.jsdoc.js create mode 100644 owl-bot-staging/v2/.mocharc.js create mode 100644 owl-bot-staging/v2/.prettierrc.js create mode 100644 owl-bot-staging/v2/README.md create mode 100644 owl-bot-staging/v2/linkinator.config.json create mode 100644 owl-bot-staging/v2/package.json create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json create mode 100644 owl-bot-staging/v2/src/index.ts create mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/src/v2/index.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json create mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json create mode 100644 
owl-bot-staging/v2/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2/system-test/install.ts create mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts create mode 100644 owl-bot-staging/v2/tsconfig.json create mode 100644 owl-bot-staging/v2/webpack.config.js create mode 100644 owl-bot-staging/v2alpha/.eslintignore create mode 100644 owl-bot-staging/v2alpha/.eslintrc.json create mode 100644 owl-bot-staging/v2alpha/.gitignore create mode 100644 owl-bot-staging/v2alpha/.jsdoc.js create mode 100644 owl-bot-staging/v2alpha/.mocharc.js create mode 100644 owl-bot-staging/v2alpha/.prettierrc.js create mode 100644 owl-bot-staging/v2alpha/README.md create mode 100644 owl-bot-staging/v2alpha/linkinator.config.json create mode 100644 owl-bot-staging/v2alpha/package.json create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json create mode 100644 owl-bot-staging/v2alpha/src/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2alpha/system-test/install.ts create mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts create mode 100644 owl-bot-staging/v2alpha/tsconfig.json create mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ 
b/owl-bot-staging/v2/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js new file mode 100644 index 0000000..c3c1e3d --- /dev/null +++ b/owl-bot-staging/v2/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/bigquery-migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json new file mode 100644 index 0000000..407e1b9 --- /dev/null +++ b/owl-bot-staging/v2/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/bigquery-migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . 
&& cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.2.0" + }, + "devDependencies": { + "@types/mocha": "^9.1.1", + "@types/node": "^16.11.50", + "@types/sinon": "^10.0.13", + "c8": "^7.12.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.11", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.2", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^8.4.0", + "typescript": "^4.7.4", + "webpack": "^4.46.0", + "webpack-cli": "^4.10.0" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto new file mode 100644 index 0000000..7d77bae --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto @@ -0,0 +1,233 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2/translation_config.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. 
forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. + // The ID is server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map tasks = 2; + + // Output only. That status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // Tha task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. + FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Batch SQL Translation. + TranslationConfigDetails translation_config_details = 14; + } + + // Output only. Immutable. The unique identifier for the migration task. The + // ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be one of the supported task types: + // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, + // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, + // Translation_Snowflake2BQ, Translation_Netezza2BQ, + // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, + // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. + string type = 2; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. 
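The workflow state machine above (DRAFT, RUNNING, PAUSED, COMPLETED) is what drives when StartMigrationWorkflow is useful: per the service proto, starting is a no-op for RUNNING workflows and an error for any state other than DRAFT or RUNNING. A hedged sketch of checking the state before starting, using the generated v2 client; the resource name is a placeholder, and the state enum may surface as the string name or its numeric value depending on client conversion options.

const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2;

async function startIfDraft(name) {
  const client = new MigrationServiceClient();
  // GAPIC promise methods resolve to an array whose first element is the response.
  const [workflow] = await client.getMigrationWorkflow({name});
  console.log(`Workflow ${workflow.name} is in state ${workflow.state}`);
  // Only start drafts; RUNNING is a no-op and other states are rejected.
  if (workflow.state === 'DRAFT') {
    await client.startMigrationWorkflow({name});
  }
}

startIfDraft('projects/123/locations/us/workflows/345');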
+ enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID + // is server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 6 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Provides details to errors and issues encountered while + // processing the subtask. Presence of error details does not mean that the + // subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number or resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto new file mode 100644 index 0000000..199e2db --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that where encountered when +// processing a subtask. +message ErrorDetail { + // Optional. The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto new file mode 100644 index 0000000..e52fead --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. 
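The metrics proto above maps onto plain objects in the Node.js client, with field names converted to camelCase and timestamps carried as {seconds, nanos} objects. A rough sketch of what a single GAUGE time series attached to a MigrationSubtask could look like; the metric name and values are invented for illustration.

// Illustrative only: one gauge point reporting a count for a subtask.
const timeSeries = {
  metric: 'bigquerymigration.googleapis.com/processed_statements', // assumed name
  valueType: 'INT64',
  metricKind: 'GAUGE',
  points: [
    {
      // For GAUGE metrics the start time may be omitted or equal the end time.
      interval: {endTime: {seconds: 1660000000, nanos: 0}},
      value: {int64Value: 42},
    },
  ],
};
console.log(JSON.stringify(timeSeries, null, 2));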
+ oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately `+/-10^(+/-300)` and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto new file mode 100644 index 0000000..3c1a89e --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto @@ -0,0 +1,245 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_entities.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. 
+ rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. + rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. 
+ // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. + // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. 
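The page_size, page_token, and filter fields above are normally handled through the generated client's async iterator, so callers rarely thread next_page_token by hand. A small sketch listing the subtasks of one task; the parent and filter values are placeholders, and the filter syntax follows the `migration_task = "ab012"` example in the proto comment.

const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2;

async function listSubtasksForTask() {
  const client = new MigrationServiceClient();
  const iterable = client.listMigrationSubtasksAsync({
    parent: 'projects/123/locations/us/workflows/1234',
    filter: 'migration_task = "ab012"', // only subtasks of task ab012
    pageSize: 50, // per-page size; the iterator spans pages automatically
  });
  for await (const subtask of iterable) {
    console.log(`${subtask.name}: ${subtask.state}`);
  }
}

listSubtasksForTask();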
+ string next_page_token = 2; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto new file mode 100644 index 0000000..994140d --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto @@ -0,0 +1,257 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationConfigProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The translation config to capture necessary settings for a translation task +// and subtask. +message TranslationConfigDetails { + // The chosen path where the source for input files will be found. + oneof source_location { + // The Cloud Storage path for a directory of files to translate in a task. + string gcs_source_path = 1; + } + + // The chosen path where the destination for output files will be found. + oneof target_location { + // The Cloud Storage path to write back the corresponding input files to. + string gcs_target_path = 2; + } + + // The dialect of the input files. + Dialect source_dialect = 3; + + // The target dialect for the engine to translate the input to. + Dialect target_dialect = 4; + + // The mapping of full SQL object names from their current state to the + // desired output. + oneof output_name_mapping { + // The mapping of objects to their desired output names in list form. + ObjectNameMappingList name_mapping_list = 5; + } + + // The default source environment values for the translation. + SourceEnv source_env = 6; +} + +// The possible dialect options for translation. +message Dialect { + // The possible dialect options that this message represents. 
+ oneof dialect_value { + // The BigQuery dialect + BigQueryDialect bigquery_dialect = 1; + + // The HiveQL dialect + HiveQLDialect hiveql_dialect = 2; + + // The Redshift dialect + RedshiftDialect redshift_dialect = 3; + + // The Teradata dialect + TeradataDialect teradata_dialect = 4; + + // The Oracle dialect + OracleDialect oracle_dialect = 5; + + // The SparkSQL dialect + SparkSQLDialect sparksql_dialect = 6; + + // The Snowflake dialect + SnowflakeDialect snowflake_dialect = 7; + + // The Netezza dialect + NetezzaDialect netezza_dialect = 8; + + // The Azure Synapse dialect + AzureSynapseDialect azure_synapse_dialect = 9; + + // The Vertica dialect + VerticaDialect vertica_dialect = 10; + + // The SQL Server dialect + SQLServerDialect sql_server_dialect = 11; + + // The Postgresql dialect + PostgresqlDialect postgresql_dialect = 12; + + // The Presto dialect + PrestoDialect presto_dialect = 13; + + // The MySQL dialect + MySQLDialect mysql_dialect = 14; + } +} + +// The dialect definition for BigQuery. +message BigQueryDialect {} + +// The dialect definition for HiveQL. +message HiveQLDialect {} + +// The dialect definition for Redshift. +message RedshiftDialect {} + +// The dialect definition for Teradata. +message TeradataDialect { + // The sub-dialect options for Teradata. + enum Mode { + // Unspecified mode. + MODE_UNSPECIFIED = 0; + + // Teradata SQL mode. + SQL = 1; + + // BTEQ mode (which includes SQL). + BTEQ = 2; + } + + // Which Teradata sub-dialect mode the user specifies. + Mode mode = 1; +} + +// The dialect definition for Oracle. +message OracleDialect {} + +// The dialect definition for SparkSQL. +message SparkSQLDialect {} + +// The dialect definition for Snowflake. +message SnowflakeDialect {} + +// The dialect definition for Netezza. +message NetezzaDialect {} + +// The dialect definition for Azure Synapse. +message AzureSynapseDialect {} + +// The dialect definition for Vertica. +message VerticaDialect {} + +// The dialect definition for SQL Server. +message SQLServerDialect {} + +// The dialect definition for Postgresql. +message PostgresqlDialect {} + +// The dialect definition for Presto. +message PrestoDialect {} + +// The dialect definition for MySQL. +message MySQLDialect {} + +// Represents a map of name mappings using a list of key:value proto messages of +// existing name to desired output name. +message ObjectNameMappingList { + // The elements of the object name map. + repeated ObjectNameMapping name_map = 1; +} + +// Represents a key-value pair of NameMappingKey to NameMappingValue to +// represent the mapping of SQL names from the input value to desired output. +message ObjectNameMapping { + // The name of the object in source that is being mapped. + NameMappingKey source = 1; + + // The desired target name of the object that is being mapped. + NameMappingValue target = 2; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the source data warehouse. +message NameMappingKey { + // The type of the object that is being mapped. + enum Type { + // Unspecified name mapping type. + TYPE_UNSPECIFIED = 0; + + // The object being mapped is a database. + DATABASE = 1; + + // The object being mapped is a schema. + SCHEMA = 2; + + // The object being mapped is a relation. + RELATION = 3; + + // The object being mapped is an attribute. + ATTRIBUTE = 4; + + // The object being mapped is a relation alias. + RELATION_ALIAS = 5; + + // The object being mapped is a an attribute alias. 
+ ATTRIBUTE_ALIAS = 6; + + // The object being mapped is a function. + FUNCTION = 7; + } + + // The type of object that is being mapped. + Type type = 1; + + // The database name (BigQuery project ID equivalent in the source data + // warehouse). + string database = 2; + + // The schema name (BigQuery dataset equivalent in the source data warehouse). + string schema = 3; + + // The relation name (BigQuery table or view equivalent in the source data + // warehouse). + string relation = 4; + + // The attribute name (BigQuery column equivalent in the source data + // warehouse). + string attribute = 5; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the target data warehouse. +message NameMappingValue { + // The database name (BigQuery project ID equivalent in the target data + // warehouse). + string database = 1; + + // The schema name (BigQuery dataset equivalent in the target data warehouse). + string schema = 2; + + // The relation name (BigQuery table or view equivalent in the target data + // warehouse). + string relation = 3; + + // The attribute name (BigQuery column equivalent in the target data + // warehouse). + string attribute = 4; +} + +// Represents the default source environment values for the translation. +message SourceEnv { + // The default database name to fully qualify SQL objects when their database + // name is missing. + string default_database = 1; + + // The schema search path. When SQL objects are missing schema name, + // translation engine will search through this list to find the value. + repeated string schema_search_path = 2; +} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..8301c3a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js @@ -0,0 +1,67 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. 
+ */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..9f0651e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..25de9e0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..52ab5cd --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
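The read_mask parameters in the samples above are google.protobuf.FieldMask values; in the Node.js client they are passed as plain objects with a `paths` array. A brief sketch for the getMigrationWorkflow sample follows (it belongs inside the sample's async callGetMigrationWorkflow() function); the field names are assumptions taken from migration_entities.proto elsewhere in this patch, not from this excerpt.

// Hedged sketch: restrict the returned MigrationWorkflow to a few fields.
// 'display_name' and 'state' are assumed field names from migration_entities.proto.
const readMask = {paths: ['display_name', 'state']};
const [workflow] = await migrationClient.getMigrationWorkflow({name, readMask});
console.log(workflow);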
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..c5c7ed0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js @@ -0,0 +1,86 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
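Building on the filter documented above, here is a sketch that lists only the subtasks belonging to one task via the async iterator used in the sample. It belongs inside an async function such as the sample's callListMigrationSubtasks(); the parent value is a placeholder.

// Hedged sketch: list only the subtasks of task "ab012" using the documented
// filter syntax. The parent resource name is a placeholder.
const iterable = migrationClient.listMigrationSubtasksAsync({
  parent: 'projects/123/locations/us/workflows/1234',
  filter: 'migration_task = "ab012"',
});
for await (const subtask of iterable) {
  console.log(subtask.name);
}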
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..ebd2127 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js @@ -0,0 +1,80 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
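For each paginated RPC the generated client exposes three surfaces, as the gapic_metadata.json later in this patch also records: the promise-based method, a *Stream variant, and the *Async iterator used in the samples. A sketch of the stream form for the workflow listing documented above follows; `parent` is the same placeholder as in the sample.

// Hedged sketch: stream-based pagination, equivalent to the async-iterator
// form used in the generated sample above.
const stream = migrationClient.listMigrationWorkflowsStream({parent, pageSize: 50});
stream.on('data', workflow => console.log(workflow.name));
stream.on('error', err => { console.error(err); process.exitCode = 1; });
stream.on('end', () => console.log('Done listing migration workflows.'));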
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..7f8257d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json new file mode 100644 index 0000000..81ec8bb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2", + "version": "v2" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 72, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 78, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "service": { + "shortName": "MigrationService", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts new file mode 100644 index 0000000..35a8fd9 --- /dev/null +++ b/owl-bot-staging/v2/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2 from './v2'; +const MigrationServiceClient = v2.MigrationServiceClient; +type MigrationServiceClient = v2.MigrationServiceClient; +export {v2, MigrationServiceClient}; +export default {v2, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json new file mode 100644 index 0000000..1b6a33c --- /dev/null +++ b/owl-bot-staging/v2/src/v2/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2", + "libraryPackage": "@google-cloud/bigquery-migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/index.ts 
b/owl-bot-staging/v2/src/v2/index.ts new file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts new file mode 100644 index 0000000..d99d7a2 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client.ts @@ -0,0 +1,1246 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; + +import {Transform} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
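The src/index.ts above exports the versioned namespace, the default-version client, and the compiled protos, so either import style below should work once the package is built and published. This assumes the package's main entry resolves to that compiled index; the package name comes from the samples in this patch.

// Hedged sketch: the two import styles enabled by the index.ts above.
const {MigrationServiceClient} = require('@google-cloud/bigquery-migration');  // default version
const {v2} = require('@google-cloud/bigquery-migration');                      // explicit v2
const clientA = new MigrationServiceClient();
const clientB = new v2.MigrationServiceClient();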
+ * @class + * @memberof v2 + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
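The constructor options documented above are passed as a plain object; a sketch follows, in which every concrete value (project ID, key path, endpoint) is a placeholder rather than something taken from this patch.

// Hedged sketch: constructing the client with the documented options.
// All concrete values are placeholders.
const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2;
const client = new MigrationServiceClient({
  projectId: 'my-gcp-project',            // otherwise read from GCLOUD_PROJECT / ADC
  keyFilename: '/path/to/keyfile.json',   // optional when Application Default Credentials exist
  apiEndpoint: 'bigquerymigration.googleapis.com',
  fallback: 'rest',                       // HTTP/1.1 REST mode instead of gRPC, as described above
});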
+ if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. 
+ * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback): + Promise|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
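As the overload signatures above show, each unary method also accepts a Node-style callback instead of returning a promise. A brief sketch using deleteMigrationWorkflow; `workflowName` is a placeholder resource name.

// Hedged sketch: callback-style invocation, mirroring the overloads above.
// `workflowName` is a placeholder, e.g. 'projects/123/locations/us/workflows/1234'.
migrationClient.deleteMigrationWorkflow({name: workflowName}, (err, empty) => {
  if (err) {
    console.error('deleteMigrationWorkflow failed:', err);
    return;
  }
  console.log('Migration workflow deleted.', empty);
});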
+ * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
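Per-call options (the second argument documented above) override the defaults that migration_service_client_config.json establishes for timeouts and retries. A sketch, meant for use inside an async function, with a placeholder subtask name:

// Hedged sketch: overriding the call deadline for a single request.
// `subtaskName` is a placeholder resource name.
const [subtask] = await migrationClient.getMigrationSubtask(
  {name: subtaskName},
  {timeout: 30000}  // 30-second deadline for this call only
);
console.log(subtask);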
+ * @example include:samples/generated/v2/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
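A sketch of fetching a single subtask with the optional `readMask`. The FieldMask paths and the fields read off the response (`name`, `state`) are illustrative assumptions about the MigrationSubtask proto, not something this diff shows; the resource IDs are placeholders.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function getSubtask() {
  const client = new MigrationServiceClient();
  const name = client.migrationSubtaskPath('my-project', 'us', '1234', '543');
  const [subtask] = await client.getMigrationSubtask({
    name,
    // Optional: limit the returned fields; omit readMask to get the full resource.
    readMask: {paths: ['name', 'state']},
  });
  console.log(subtask.name, subtask.state);
}

getSubtask().catch(console.error);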
+ * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
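A sketch of the stream variant just shown: each 'data' event emits one MigrationWorkflow and pages are fetched under the hood. The parent value is a placeholder and the handler parameters are left untyped for brevity.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

const client = new MigrationServiceClient();
client
  .listMigrationWorkflowsStream({parent: 'projects/my-project/locations/us', pageSize: 50})
  .on('data', workflow => console.log(workflow.name))
  .on('error', err => console.error(err))
  .on('end', () => console.log('all pages consumed'));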
+ * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. 
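And the same listing via the async iterable, which requests one page at a time and lets you stop early; a sketch with placeholder IDs.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listWorkflows() {
  const client = new MigrationServiceClient();
  const parent = client.locationPath('my-project', 'us');
  let count = 0;
  for await (const workflow of client.listMigrationWorkflowsAsync({parent})) {
    console.log(workflow.name);
    // Breaking out of the loop stops pagination; no further pages are requested.
    if (++count >= 10) break;
  }
}

listWorkflows().catch(console.error);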
+ */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. 
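A sketch of the one-shot form above, narrowing the subtasks to a single task ID with the `filter` syntax the JSDoc describes. Auto-pagination merges all pages into the returned array; the parent and task ID are placeholders.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listSubtasksOfTask() {
  const client = new MigrationServiceClient();
  const [subtasks] = await client.listMigrationSubtasks({
    parent: 'projects/my-project/locations/us/workflows/1234',
    filter: 'migration_task = "ab012"',  // task ID, as in the JSDoc example
  });
  console.log(`${subtasks.length} subtasks for task ab012`);
}

listSubtasksOfTask().catch(console.error);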
+ * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. 
The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. + */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. 
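The path-template helpers above can be exercised as in this sketch: the `*Path()` methods render the documented resource-name formats, and the `match*From*Name()` methods parse individual segments back out. All IDs are placeholders.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

const client = new MigrationServiceClient();
const parent = client.locationPath('my-project', 'us');
// 'projects/my-project/locations/us'
const subtask = client.migrationSubtaskPath('my-project', 'us', '1234', '543');
// 'projects/my-project/locations/us/workflows/1234/subtasks/543'
console.log(parent, subtask);
console.log(client.matchProjectFromMigrationSubtaskName(subtask)); // 'my-project'
console.log(client.matchLocationFromLocationName(parent));         // 'us'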
+ */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
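Finally, a sketch of per-call options and shutdown. That gax `CallOptions` accepts a millisecond `timeout` is an assumption from google-gax rather than something this diff shows; reading `state` off the workflow is likewise illustrative. Once `close()` resolves, further calls are rejected (the unit tests below expect the error 'The client has already been closed.').

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function getWorkflowThenClose() {
  const client = new MigrationServiceClient();
  const name = client.migrationWorkflowPath('my-project', 'us', '1234');
  // Second argument is a gax CallOptions object; timeout is assumed to be in milliseconds.
  const [workflow] = await client.getMigrationWorkflow({name}, {timeout: 30000});
  console.log(workflow.state);
  await client.close();  // terminates the gRPC channel; the client must not be reused
}

getWorkflowThenClose().catch(console.error);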
+ */ + close(): Promise { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json new file mode 100644 index 0000000..5832815 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client_config.json @@ -0,0 +1,71 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListMigrationSubtasks": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json new file mode 100644 index 0000000..57df7ab --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json @@ -0,0 +1,7 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" +] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..ecc7e4b --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/bigquery-migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..80fbe2d --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts new file mode 100644 index 0000000..557a575 --- /dev/null +++ b/owl-bot-staging/v2/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import {packNTest} from 'pack-n-play'; +import {readFileSync} from 'fs'; +import {describe, it} from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts new file mode 100644 index 0000000..061c58c --- /dev/null +++ b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2.MigrationServiceClient', () => { + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new 
migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + 
client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', 
async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + 
assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + 
const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = 
generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, 
expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + 
resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = 
stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + 
.getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + 
assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2alpha/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js new file mode 100644 index 0000000..c3c1e3d --- /dev/null +++ b/owl-bot-staging/v2alpha/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/bigquery-migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2alpha/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2alpha/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2alpha/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2alpha/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json new file mode 100644 index 0000000..407e1b9 --- /dev/null +++ b/owl-bot-staging/v2alpha/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/bigquery-migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.2.0" + }, + "devDependencies": { + "@types/mocha": "^9.1.1", + "@types/node": "^16.11.50", + "@types/sinon": "^10.0.13", + "c8": "^7.12.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.11", + "jsdoc-fresh": "^2.0.0", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.2", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^8.4.0", + "typescript": "^4.7.4", + "webpack": "^4.46.0", + "webpack-cli": "^4.10.0" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto new file mode 100644 index 0000000..0c6ea13 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto @@ -0,0 +1,49 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "AssessmentTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Assessment task config. +message AssessmentTaskDetails { + // Required. The Cloud Storage path for assessment input files. + string input_path = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The BigQuery dataset for output. + string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. An optional Cloud Storage path to write the query logs (which is + // then used as an input path on the translation task) + string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) + // from which the input data is extracted. + string data_source = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// Details for an assessment task orchestration result. +message AssessmentOrchestrationResultDetails { + // Optional. The version used for the output table schemas. + string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto new file mode 100644 index 0000000..50d4c75 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto @@ -0,0 +1,244 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map tasks = 2; + + // Output only. That status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // Tha task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. 
+ FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Assessment. + AssessmentTaskDetails assessment_task_details = 12; + + // Task configuration for Batch/Offline SQL Translation. + TranslationTaskDetails translation_task_details = 13; + } + + // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be a supported task type. + string type = 2; + + // DEPRECATED! Use one of the task_details below. + // The details of the task. The type URL must be one of the supported task + // details messages and correspond to the Task's type. + google.protobuf.Any details = 3; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; + + // Output only. Additional information about the orchestration. + MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. 
Provides details about errors and issues encountered while processing the + // subtask. Presence of error details does not mean that the subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number of resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} + +// Additional information from the orchestrator when it is done with the +// task orchestration. +message MigrationTaskOrchestrationResult { + // Details specific to the task type. + oneof details { + // Details specific to assessment task types. + AssessmentOrchestrationResultDetails assessment_details = 1; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto new file mode 100644 index 0000000..89dac5e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that were encountered when +// processing a subtask. +message ErrorDetail { + // Optional.
The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto new file mode 100644 index 0000000..ce60dd2 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. 
If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. + oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately +/-9.2x10^18. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately +/-10^(+/-300) and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto new file mode 100644 index 0000000..9a184a1 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto @@ -0,0 +1,247 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2alpha/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. + rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. 
+ rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. 
+ // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto new file mode 100644 index 0000000..bf4b27e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto @@ -0,0 +1,207 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Mapping between an input and output file to be translated in a subtask. +message TranslationFileMapping { + // The Cloud Storage path for a file to translation in a subtask. + string input_path = 1; + + // The Cloud Storage path to write back the corresponding input file to. + string output_path = 2; +} + +// The translation task config to capture necessary settings for a translation +// task and subtask. +message TranslationTaskDetails { + // The file encoding types. + enum FileEncoding { + // File encoding setting is not specified. + FILE_ENCODING_UNSPECIFIED = 0; + + // File encoding is UTF_8. + UTF_8 = 1; + + // File encoding is ISO_8859_1. + ISO_8859_1 = 2; + + // File encoding is US_ASCII. + US_ASCII = 3; + + // File encoding is UTF_16. + UTF_16 = 4; + + // File encoding is UTF_16LE. + UTF_16LE = 5; + + // File encoding is UTF_16BE. + UTF_16BE = 6; + } + + // The special token data type. + enum TokenType { + // Token type is not specified. + TOKEN_TYPE_UNSPECIFIED = 0; + + // Token type as string. + STRING = 1; + + // Token type as integer. + INT64 = 2; + + // Token type as numeric. + NUMERIC = 3; + + // Token type as boolean. + BOOL = 4; + + // Token type as float. + FLOAT64 = 5; + + // Token type as date. + DATE = 6; + + // Token type as timestamp. + TIMESTAMP = 7; + } + + // The language specific settings for the translation task. + oneof language_options { + // The Teradata SQL specific settings for the translation task. + TeradataOptions teradata_options = 10; + + // The BTEQ specific settings for the translation task. + BteqOptions bteq_options = 11; + } + + // The Cloud Storage path for translation input files. + string input_path = 1; + + // The Cloud Storage path for translation output files. + string output_path = 2; + + // Cloud Storage files to be processed for translation. + repeated TranslationFileMapping file_paths = 12; + + // The Cloud Storage path to DDL files as table schema to assist semantic + // translation. + string schema_path = 3; + + // The file encoding type. + FileEncoding file_encoding = 4; + + // The settings for SQL identifiers. + IdentifierSettings identifier_settings = 5; + + // The map capturing special tokens to be replaced during translation. The key + // is special token in string. The value is the token data type. This is used + // to translate SQL query template which contains special token as place + // holder. The special token makes a query invalid to parse. This map will be + // applied to annotate those special token with types to let parser understand + // how to parse them into proper structure with type information. + map special_token_map = 6; + + // The filter applied to translation details. + Filter filter = 7; + + // Specifies the exact name of the bigquery table ("dataset.table") to be used + // for surfacing raw translation errors. If the table does not exist, we will + // create it. If it already exists and the schema is the same, we will re-use. + // If the table exists and the schema is different, we will throw an error. 
+ string translation_exception_table = 13; +} + +// The filter applied to fields of translation details. +message Filter { + // The list of prefixes used to exclude processing for input files. + repeated string input_file_exclusion_prefixes = 1; +} + +// Settings related to SQL identifiers. +message IdentifierSettings { + // The identifier case type. + enum IdentifierCase { + // The identifier case is not specified. + IDENTIFIER_CASE_UNSPECIFIED = 0; + + // Identifiers will keep their original case. + ORIGINAL = 1; + + // Identifiers will be in upper case. + UPPER = 2; + + // Identifiers will be in lower case. + LOWER = 3; + } + + // The SQL identifier rewrite mode. + enum IdentifierRewriteMode { + // SQL identifier rewrite mode is unspecified. + IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; + + // SQL identifiers won't be rewritten. + NONE = 1; + + // All SQL identifiers will be rewritten. + REWRITE_ALL = 2; + } + + // The setting to control output queries' identifier case. + IdentifierCase output_identifier_case = 1; + + // Specifies the rewrite mode for SQL identifiers. + IdentifierRewriteMode identifier_rewrite_mode = 2; +} + +// Teradata SQL specific translation task related settings. +message TeradataOptions { + +} + +// BTEQ translation task related settings. +message BteqOptions { + // Specifies the project and dataset in BigQuery that will be used for + // external table creation during the translation. + DatasetReference project_dataset = 1; + + // The Cloud Storage location to be used as the default path for files that + // are not otherwise specified in the file replacement map. + string default_path_uri = 2; + + // Maps the local paths that are used in BTEQ scripts (the keys) to the paths + // in Cloud Storage that should be used in their stead in the translation (the + // values). + map<string, string> file_replacement_map = 3; +} + +// Reference to a BigQuery dataset. +message DatasetReference { + // A unique ID for this dataset, without the project name. The ID + // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). + // The maximum length is 1,024 characters. + string dataset_id = 1; + + // The ID of the project containing this dataset. + string project_id = 2; +} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..d8613fe --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js @@ -0,0 +1,67 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten.
** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. + */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..e17c7ee --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..a17add1 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..1519196 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..99419dd --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js @@ -0,0 +1,86 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..31c0263 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js @@ -0,0 +1,80 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..85288c6 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json new file mode 100644 index 0000000..c5336c2 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2alpha", + "version": "v2alpha" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 72, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 78, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "service": { + 
"shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts new file mode 100644 index 0000000..288e629 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2alpha from './v2alpha'; +const MigrationServiceClient = v2alpha.MigrationServiceClient; +type MigrationServiceClient = v2alpha.MigrationServiceClient; +export {v2alpha, MigrationServiceClient}; +export default {v2alpha, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json new file mode 100644 index 0000000..adf8d06 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2alpha", + "libraryPackage": "@google-cloud/bigquery-migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + 
"listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts new file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts new file mode 100644 index 0000000..0fbbe62 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts @@ -0,0 +1,1246 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; + +import {Transform} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2alpha/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
+ * @class + * @memberof v2alpha + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. 
+   * This function will be called automatically when any class method is called for the
+   * first time, but if you need to initialize it before calling an actual method,
+   * feel free to call initialize() directly.
+   *
+   * You can await on this method if you want to make sure the client is initialized.
+   *
+   * @returns {Promise} A promise that resolves to an authenticated service stub.
+   */
+  initialize() {
+    // If the client stub promise is already initialized, return immediately.
+    if (this.migrationServiceStub) {
+      return this.migrationServiceStub;
+    }
+
+    // Put together the "service stub" for
+    // google.cloud.bigquery.migration.v2alpha.MigrationService.
+    this.migrationServiceStub = this._gaxGrpc.createStub(
+        this._opts.fallback ?
+          (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') :
+          // eslint-disable-next-line @typescript-eslint/no-explicit-any
+          (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService,
+        this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>;
+
+    // Iterate over each of the methods that the service provides
+    // and create an API call method for each.
+    const migrationServiceStubMethods =
+        ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks'];
+    for (const methodName of migrationServiceStubMethods) {
+      const callPromise = this.migrationServiceStub.then(
+        stub => (...args: Array<{}>) => {
+          if (this._terminated) {
+            return Promise.reject('The client has already been closed.');
+          }
+          const func = stub[methodName];
+          return func.apply(stub, args);
+        },
+        (err: Error|null|undefined) => () => {
+          throw err;
+        });
+
+      const descriptor =
+        this.descriptors.page[methodName] ||
+        undefined;
+      const apiCall = this._gaxModule.createApiCall(
+        callPromise,
+        this._defaults[methodName],
+        descriptor,
+        this._opts.fallback
+      );
+
+      this.innerApiCalls[methodName] = apiCall;
+    }
+
+    return this.migrationServiceStub;
+  }
+
+  /**
+   * The DNS address for this API service.
+   * @returns {string} The DNS address for this service.
+   */
+  static get servicePath() {
+    return 'bigquerymigration.googleapis.com';
+  }
+
+  /**
+   * The DNS address for this API service - same as servicePath(),
+   * exists for compatibility reasons.
+   * @returns {string} The DNS address for this service.
+   */
+  static get apiEndpoint() {
+    return 'bigquerymigration.googleapis.com';
+  }
+
+  /**
+   * The port for this API service.
+   * @returns {number} The default port for this service.
+   */
+  static get port() {
+    return 443;
+  }
+
+  /**
+   * The scopes needed to make gRPC calls for every method defined
+   * in this service.
+   * @returns {string[]} List of default scopes.
+   */
+  static get scopes() {
+    return [
+      'https://www.googleapis.com/auth/cloud-platform'
+    ];
+  }
+
+  getProjectId(): Promise<string>;
+  getProjectId(callback: Callback<string, undefined, undefined>): void;
+  /**
+   * Return the project ID used by this class.
+   * @returns {Promise} A promise that resolves to string containing the project ID.
+   */
+  getProjectId(callback?: Callback<string, undefined, undefined>):
+      Promise<string>|void {
+    if (callback) {
+      this.auth.getProjectId(callback);
+      return;
+    }
+    return this.auth.getProjectId();
+  }
+
+  // -------------------
+  // -- Service calls --
+  // -------------------
+/**
+ * Creates a migration workflow.
+ *
+ * @param {Object} request
+ *   The request object that will be sent.
+ * @param {string} request.parent
+ *   Required.
The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. 
+ * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. 
The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. 
+ * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = 
options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. 
+ *
+ *   When paginating, all other parameters provided to `ListMigrationWorkflows`
+ *   must match the call that provided the page token.
+ * @param {object} [options]
+ *   Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Object}
+ *   An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols).
+ *   When you iterate the returned iterable, each element will be an object representing
+ *   [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page,
+ *   so you can stop the iteration when you don't need more results.
+ *   Please see the
+ *   [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination)
+ *   for more details and examples.
+ * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js
+ * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async
+ */
+  listMigrationWorkflowsAsync(
+      request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest,
+      options?: CallOptions):
+    AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>{
+    request = request || {};
+    options = options || {};
+    options.otherArgs = options.otherArgs || {};
+    options.otherArgs.headers = options.otherArgs.headers || {};
+    options.otherArgs.headers[
+      'x-goog-request-params'
+    ] = gax.routingHeader.fromParams({
+      'parent': request.parent || '',
+    });
+    const defaultCallSettings = this._defaults['listMigrationWorkflows'];
+    const callSettings = defaultCallSettings.merge(options);
+    this.initialize();
+    return this.descriptors.page.listMigrationWorkflows.asyncIterate(
+      this.innerApiCalls['listMigrationWorkflows'] as GaxCall,
+      request as {},
+      callSettings
+    ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>;
+  }
+ /**
+ * Lists previously created migration subtasks.
+ *
+ * @param {Object} request
+ *   The request object that will be sent.
+ * @param {string} request.parent
+ *   Required. The migration task of the subtasks to list.
+ *   Example: `projects/123/locations/us/workflows/1234`
+ * @param {google.protobuf.FieldMask} [request.readMask]
+ *   Optional. The list of fields to be retrieved.
+ * @param {number} [request.pageSize]
+ *   Optional. The maximum number of migration tasks to return. The service may return
+ *   fewer than this number.
+ * @param {string} [request.pageToken]
+ *   Optional. A page token, received from previous `ListMigrationSubtasks` call.
+ *   Provide this to retrieve the subsequent page.
+ *
+ *   When paginating, all other parameters provided to `ListMigrationSubtasks`
+ *   must match the call that provided the page token.
+ * @param {string} [request.filter]
+ *   Optional. The filter to apply. This can be used to get the subtasks of a specific
+ *   tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the
+ *   task ID (not the name in the named map).
+ * @param {object} [options]
+ *   Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ *   The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}.
+ * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. 
+ * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. 
`migration_task = "ab012"` where `"ab012"` is the
+ *   task ID (not the name in the named map).
+ * @param {object} [options]
+ *   Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Object}
+ *   An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols).
+ *   When you iterate the returned iterable, each element will be an object representing
+ *   [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page,
+ *   so you can stop the iteration when you don't need more results.
+ *   Please see the
+ *   [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination)
+ *   for more details and examples.
+ * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js
+ * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async
+ */
+  listMigrationSubtasksAsync(
+      request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest,
+      options?: CallOptions):
+    AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>{
+    request = request || {};
+    options = options || {};
+    options.otherArgs = options.otherArgs || {};
+    options.otherArgs.headers = options.otherArgs.headers || {};
+    options.otherArgs.headers[
+      'x-goog-request-params'
+    ] = gax.routingHeader.fromParams({
+      'parent': request.parent || '',
+    });
+    const defaultCallSettings = this._defaults['listMigrationSubtasks'];
+    const callSettings = defaultCallSettings.merge(options);
+    this.initialize();
+    return this.descriptors.page.listMigrationSubtasks.asyncIterate(
+      this.innerApiCalls['listMigrationSubtasks'] as GaxCall,
+      request as {},
+      callSettings
+    ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>;
+  }
+  // --------------------
+  // -- Path templates --
+  // --------------------
+
+  /**
+   * Return a fully-qualified location resource name string.
+   *
+   * @param {string} project
+   * @param {string} location
+   * @returns {string} Resource name string.
+   */
+  locationPath(project:string,location:string) {
+    return this.pathTemplates.locationPathTemplate.render({
+      project: project,
+      location: location,
+    });
+  }
+
+  /**
+   * Parse the project from Location resource.
+   *
+   * @param {string} locationName
+   *   A fully-qualified path representing Location resource.
+   * @returns {string} A string representing the project.
+   */
+  matchProjectFromLocationName(locationName: string) {
+    return this.pathTemplates.locationPathTemplate.match(locationName).project;
+  }
+
+  /**
+   * Parse the location from Location resource.
+   *
+   * @param {string} locationName
+   *   A fully-qualified path representing Location resource.
+   * @returns {string} A string representing the location.
+   */
+  matchLocationFromLocationName(locationName: string) {
+    return this.pathTemplates.locationPathTemplate.match(locationName).location;
+  }
+
+  /**
+   * Return a fully-qualified migrationSubtask resource name string.
+   *
+   * @param {string} project
+   * @param {string} location
+   * @param {string} workflow
+   * @param {string} subtask
+   * @returns {string} Resource name string.
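+   *
+   * Illustrative sketch (the identifier values below are hypothetical):
+   *   // migrationSubtaskPath('my-project', 'us', 'my-workflow', 'my-subtask')
+   *   //   => 'projects/my-project/locations/us/workflows/my-workflow/subtasks/my-subtask'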
+ */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. 
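+   *
+   * Illustrative sketch (the resource name below is hypothetical):
+   *   // matchWorkflowFromMigrationWorkflowName('projects/my-project/locations/us/workflows/my-workflow')
+   *   //   => 'my-workflow'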
+   */
+  matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) {
+    return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow;
+  }
+
+  /**
+   * Terminate the gRPC channel and close the client.
+   *
+   * The client will no longer be usable and all future behavior is undefined.
+   * @returns {Promise} A promise that resolves when the client is closed.
+   */
+  close(): Promise<void> {
+    if (this.migrationServiceStub && !this._terminated) {
+      return this.migrationServiceStub.then(stub => {
+        this._terminated = true;
+        stub.close();
+      });
+    }
+    return Promise.resolve();
+  }
+}
diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json
new file mode 100644
index 0000000..2184b83
--- /dev/null
+++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json
@@ -0,0 +1,73 @@
+{
+  "interfaces": {
+    "google.cloud.bigquery.migration.v2alpha.MigrationService": {
+      "retry_codes": {
+        "non_idempotent": [],
+        "idempotent": [
+          "DEADLINE_EXCEEDED",
+          "UNAVAILABLE"
+        ],
+        "unavailable": [
+          "UNAVAILABLE"
+        ]
+      },
+      "retry_params": {
+        "default": {
+          "initial_retry_delay_millis": 100,
+          "retry_delay_multiplier": 1.3,
+          "max_retry_delay_millis": 60000,
+          "initial_rpc_timeout_millis": 60000,
+          "rpc_timeout_multiplier": 1,
+          "max_rpc_timeout_millis": 60000,
+          "total_timeout_millis": 600000
+        },
+        "ce5b960a6ed052e690863808e4f0deff3dc7d49f": {
+          "initial_retry_delay_millis": 1000,
+          "retry_delay_multiplier": 1.3,
+          "max_retry_delay_millis": 10000,
+          "initial_rpc_timeout_millis": 60000,
+          "rpc_timeout_multiplier": 1,
+          "max_rpc_timeout_millis": 60000,
+          "total_timeout_millis": 600000
+        }
+      },
+      "methods": {
+        "CreateMigrationWorkflow": {
+          "timeout_millis": 60000,
+          "retry_codes_name": "non_idempotent",
+          "retry_params_name": "default"
+        },
+        "GetMigrationWorkflow": {
+          "timeout_millis": 120000,
+          "retry_codes_name": "unavailable",
+          "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f"
+        },
+        "ListMigrationWorkflows": {
+          "timeout_millis": 120000,
+          "retry_codes_name": "unavailable",
+          "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f"
+        },
+        "DeleteMigrationWorkflow": {
+          "timeout_millis": 60000,
+          "retry_codes_name": "non_idempotent",
+          "retry_params_name": "default"
+        },
+        "StartMigrationWorkflow": {
+          "timeout_millis": 120000,
+          "retry_codes_name": "unavailable",
+          "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f"
+        },
+        "GetMigrationSubtask": {
+          "timeout_millis": 120000,
+          "retry_codes_name": "unavailable",
+          "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f"
+        },
+        "ListMigrationSubtasks": {
+          "timeout_millis": 120000,
+          "retry_codes_name": "unavailable",
+          "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f"
+        }
+      }
+    }
+  }
+}
diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json
new file mode 100644
index 0000000..8e91e42
--- /dev/null
+++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json
@@ -0,0 +1,8 @@
+[
+  "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto",
+  "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto",
+  "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto",
+  "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto",
+
"../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" +] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..ecc7e4b --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/bigquery-migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..80fbe2d --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts new file mode 100644 index 0000000..557a575 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+import {packNTest} from 'pack-n-play';
+import {readFileSync} from 'fs';
+import {describe, it} from 'mocha';
+
+describe('📦 pack-n-play test', () => {
+
+  it('TypeScript code', async function() {
+    this.timeout(300000);
+    const options = {
+      packageDir: process.cwd(),
+      sample: {
+        description: 'TypeScript user can use the type definitions',
+        ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString()
+      }
+    };
+    await packNTest(options);
+  });
+
+  it('JavaScript code', async function() {
+    this.timeout(300000);
+    const options = {
+      packageDir: process.cwd(),
+      sample: {
+        description: 'JavaScript user can use the library',
+        ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString()
+      }
+    };
+    await packNTest(options);
+  });
+
+});
diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts
new file mode 100644
index 0000000..99aac57
--- /dev/null
+++ b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts
@@ -0,0 +1,1256 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+import * as protos from '../protos/protos';
+import * as assert from 'assert';
+import * as sinon from 'sinon';
+import {SinonStub} from 'sinon';
+import {describe, it} from 'mocha';
+import * as migrationserviceModule from '../src';
+
+import {PassThrough} from 'stream';
+
+import {protobuf} from 'google-gax';
+
+function generateSampleMessage<T extends object>(instance: T) {
+  const filledObject = (instance.constructor as typeof protobuf.Message)
+    .toObject(instance as protobuf.Message, {defaults: true});
+  return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T;
+}
+
+function stubSimpleCall<ResponseType>(response?: ResponseType, error?: Error) {
+  return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]);
+}
+
+function stubSimpleCallWithCallback<ResponseType>(response?: ResponseType, error?: Error) {
+  return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response);
+}
+
+function stubPageStreamingCall<ResponseType>(responses?: ResponseType[], error?: Error) {
+  const pagingStub = sinon.stub();
+  if (responses) {
+    for (let i = 0; i < responses.length; ++i) {
+      pagingStub.onCall(i).callsArgWith(2, null, responses[i]);
+    }
+  }
+  const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub;
+  const mockStream = new PassThrough({
+    objectMode: true,
+    transform: transformStub,
+  });
+  // trigger as many responses as needed
+  if (responses) {
+    for (let i = 0; i < responses.length; ++i) {
+      setImmediate(() => { mockStream.write({}); });
+    }
+    setImmediate(() => { mockStream.end(); });
+  } else {
+    setImmediate(() => { mockStream.write({}); });
+    setImmediate(() => { mockStream.end(); });
+  }
+  return sinon.stub().returns(mockStream);
+}
+
+function stubAsyncIterationCall<ResponseType>(responses?: ResponseType[], error?: Error) {
+  let counter = 0;
+  const asyncIterable = {
+    [Symbol.asyncIterator]() {
+      return {
+        async next() {
+          if (error) {
+            return Promise.reject(error);
+          }
+          if (counter >= responses!.length) {
+            return Promise.resolve({done: true, value: undefined});
+          }
+          return Promise.resolve({done: false, value: responses![counter++]});
+        }
+      };
+    }
+  };
+  return sinon.stub().returns(asyncIterable);
+}
+
+describe('v2alpha.MigrationServiceClient', () => {
+  describe('Common methods', () => {
+    it('has servicePath', () => {
+      const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath;
+      assert(servicePath);
+    });
+
+    it('has apiEndpoint', () => {
+      const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint;
+      assert(apiEndpoint);
+    });
+
+    it('has port', () => {
+      const port = migrationserviceModule.v2alpha.MigrationServiceClient.port;
+      assert(port);
+      assert(typeof port === 'number');
+    });
+
+    it('should create a client with no option', () => {
+      const client = new migrationserviceModule.v2alpha.MigrationServiceClient();
+      assert(client);
+    });
+
+    it('should create a client with gRPC fallback', () => {
+      const client = new migrationserviceModule.v2alpha.MigrationServiceClient({
+        fallback: true,
+      });
+      assert(client);
+    });
+
+    it('has initialize method and supports deferred initialization', async () => {
+      const client = new migrationserviceModule.v2alpha.MigrationServiceClient({
+        credentials: {client_email: 'bogus', private_key: 'bogus'},
+        projectId: 'bogus',
+      });
+      assert.strictEqual(client.migrationServiceStub, undefined);
+      await client.initialize();
+      assert(client.migrationServiceStub);
+    });
+
+    it('has close method for the initialized client', done => {
+      const client = new migrationserviceModule.v2alpha.MigrationServiceClient({
+        credentials: {client_email: 'bogus', private_key: 'bogus'},
+        projectId: 'bogus',
+      });
+      client.initialize();
+      assert(client.migrationServiceStub);
+      client.close().then(() => {
+        done();
+      });
+    });
+
+    it('has close method for the non-initialized client', done => {
+      const client = new migrationserviceModule.v2alpha.MigrationServiceClient({
+        credentials: {client_email: 'bogus', private_key: 'bogus'},
+        projectId: 'bogus',
+      });
+      assert.strictEqual(client.migrationServiceStub, undefined);
+      client.close().then(() => {
+        done();
+      });
+    });
+
+    it('has getProjectId method', async () => {
+      const fakeProjectId = 'fake-project-id';
+      const client = new migrationserviceModule.v2alpha.MigrationServiceClient({
+        credentials: {client_email: 'bogus', private_key: 'bogus'},
+
projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + 
projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, 
expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, 
reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { 
+ 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + 
projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + 
client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: 
protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + 
request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + 
assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = 
client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2alpha/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2alpha/webpack.config.js 
b/owl-bot-staging/v2alpha/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2alpha/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; From 43a672f8080055e8a19295ac0aeadb7ecebb09c9 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 24 Aug 2022 21:08:14 +0000 Subject: [PATCH 09/13] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .jsdoc.js | 2 +- owl-bot-staging/v2/.eslintignore | 7 - owl-bot-staging/v2/.eslintrc.json | 3 - owl-bot-staging/v2/.gitignore | 14 - owl-bot-staging/v2/.jsdoc.js | 55 - owl-bot-staging/v2/.mocharc.js | 33 - owl-bot-staging/v2/.prettierrc.js | 22 - owl-bot-staging/v2/README.md | 1 - owl-bot-staging/v2/linkinator.config.json | 16 - owl-bot-staging/v2/package.json | 64 - .../migration/v2/migration_entities.proto | 233 --- .../v2/migration_error_details.proto | 62 - .../migration/v2/migration_metrics.proto | 111 -- .../migration/v2/migration_service.proto | 245 ---- .../migration/v2/translation_config.proto | 257 ---- ...ation_service.create_migration_workflow.js | 67 - ...ation_service.delete_migration_workflow.js | 62 - ...migration_service.get_migration_subtask.js | 66 - ...igration_service.get_migration_workflow.js | 66 - ...gration_service.list_migration_subtasks.js | 86 -- ...ration_service.list_migration_workflows.js | 80 -- ...ration_service.start_migration_workflow.js | 62 - ...ta.google.cloud.bigquery.migration.v2.json | 335 ----- owl-bot-staging/v2/src/index.ts | 25 - owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 -- owl-bot-staging/v2/src/v2/index.ts | 19 - .../v2/src/v2/migration_service_client.ts | 1246 ---------------- .../v2/migration_service_client_config.json | 71 - .../src/v2/migration_service_proto_list.json | 7 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - owl-bot-staging/v2/system-test/install.ts | 49 
- .../v2/test/gapic_migration_service_v2.ts | 1256 ----------------- owl-bot-staging/v2/tsconfig.json | 19 - owl-bot-staging/v2/webpack.config.js | 64 - owl-bot-staging/v2alpha/.eslintignore | 7 - owl-bot-staging/v2alpha/.eslintrc.json | 3 - owl-bot-staging/v2alpha/.gitignore | 14 - owl-bot-staging/v2alpha/.jsdoc.js | 55 - owl-bot-staging/v2alpha/.mocharc.js | 33 - owl-bot-staging/v2alpha/.prettierrc.js | 22 - owl-bot-staging/v2alpha/README.md | 1 - .../v2alpha/linkinator.config.json | 16 - owl-bot-staging/v2alpha/package.json | 64 - .../migration/v2alpha/assessment_task.proto | 49 - .../v2alpha/migration_entities.proto | 244 ---- .../v2alpha/migration_error_details.proto | 62 - .../migration/v2alpha/migration_metrics.proto | 111 -- .../migration/v2alpha/migration_service.proto | 247 ---- .../migration/v2alpha/translation_task.proto | 207 --- ...ation_service.create_migration_workflow.js | 67 - ...ation_service.delete_migration_workflow.js | 62 - ...migration_service.get_migration_subtask.js | 66 - ...igration_service.get_migration_workflow.js | 66 - ...gration_service.list_migration_subtasks.js | 86 -- ...ration_service.list_migration_workflows.js | 80 -- ...ration_service.start_migration_workflow.js | 62 - ...ogle.cloud.bigquery.migration.v2alpha.json | 335 ----- owl-bot-staging/v2alpha/src/index.ts | 25 - .../v2alpha/src/v2alpha/gapic_metadata.json | 101 -- owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 - .../src/v2alpha/migration_service_client.ts | 1246 ---------------- .../migration_service_client_config.json | 73 - .../v2alpha/migration_service_proto_list.json | 8 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - .../v2alpha/system-test/install.ts | 49 - .../test/gapic_migration_service_v2alpha.ts | 1256 ----------------- owl-bot-staging/v2alpha/tsconfig.json | 19 - owl-bot-staging/v2alpha/webpack.config.js | 64 - ...ation_service.create_migration_workflow.js | 2 +- ...ation_service.delete_migration_workflow.js | 2 +- ...migration_service.get_migration_subtask.js | 2 +- ...igration_service.get_migration_workflow.js | 2 +- ...gration_service.list_migration_subtasks.js | 2 +- ...ration_service.list_migration_workflows.js | 2 +- ...ration_service.start_migration_workflow.js | 2 +- ...ation_service.create_migration_workflow.js | 2 +- ...ation_service.delete_migration_workflow.js | 2 +- ...migration_service.get_migration_subtask.js | 2 +- ...igration_service.get_migration_workflow.js | 2 +- ...gration_service.list_migration_subtasks.js | 2 +- ...ration_service.list_migration_workflows.js | 2 +- ...ration_service.start_migration_workflow.js | 2 +- src/v2/gapic_metadata.json | 2 +- src/v2alpha/gapic_metadata.json | 2 +- system-test/fixtures/sample/src/index.js | 2 +- system-test/fixtures/sample/src/index.ts | 2 +- 88 files changed, 19 insertions(+), 9760 deletions(-) delete mode 100644 owl-bot-staging/v2/.eslintignore delete mode 100644 owl-bot-staging/v2/.eslintrc.json delete mode 100644 owl-bot-staging/v2/.gitignore delete mode 100644 owl-bot-staging/v2/.jsdoc.js delete mode 100644 owl-bot-staging/v2/.mocharc.js delete mode 100644 owl-bot-staging/v2/.prettierrc.js delete mode 100644 owl-bot-staging/v2/README.md delete mode 100644 owl-bot-staging/v2/linkinator.config.json delete mode 100644 owl-bot-staging/v2/package.json delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto delete mode 100644 
owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json delete mode 100644 owl-bot-staging/v2/src/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/src/v2/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2/system-test/install.ts delete mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts delete mode 100644 owl-bot-staging/v2/tsconfig.json delete mode 100644 owl-bot-staging/v2/webpack.config.js delete mode 100644 owl-bot-staging/v2alpha/.eslintignore delete mode 100644 owl-bot-staging/v2alpha/.eslintrc.json delete mode 100644 owl-bot-staging/v2alpha/.gitignore delete mode 100644 owl-bot-staging/v2alpha/.jsdoc.js delete mode 100644 owl-bot-staging/v2alpha/.mocharc.js delete mode 100644 owl-bot-staging/v2alpha/.prettierrc.js delete mode 100644 owl-bot-staging/v2alpha/README.md delete mode 100644 owl-bot-staging/v2alpha/linkinator.config.json delete mode 100644 owl-bot-staging/v2alpha/package.json delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js delete mode 100644 
owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json delete mode 100644 owl-bot-staging/v2alpha/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/system-test/install.ts delete mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts delete mode 100644 owl-bot-staging/v2alpha/tsconfig.json delete mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/.jsdoc.js b/.jsdoc.js index aabe555..c3c1e3d 100644 --- a/.jsdoc.js +++ b/.jsdoc.js @@ -43,7 +43,7 @@ module.exports = { copyright: 'Copyright 2022 Google LLC', includeDate: false, sourceFiles: false, - systemName: '@google-cloud/migration', + systemName: '@google-cloud/bigquery-migration', theme: 'lumen', default: { outputSourceFiles: false diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js deleted file mode 100644 index c3c1e3d..0000000 --- a/owl-bot-staging/v2/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/bigquery-migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json deleted file mode 100644 index 407e1b9..0000000 --- a/owl-bot-staging/v2/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/bigquery-migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.2.0" - }, - "devDependencies": { - "@types/mocha": "^9.1.1", - "@types/node": "^16.11.50", - "@types/sinon": "^10.0.13", - "c8": "^7.12.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.11", - "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.2", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^8.4.0", - "typescript": "^4.7.4", - "webpack": "^4.46.0", - "webpack-cli": "^4.10.0" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto deleted file mode 100644 index 7d77bae..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2/translation_config.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. - // The ID is server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. - FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Batch SQL Translation. 
- TranslationConfigDetails translation_config_details = 14; - } - - // Output only. Immutable. The unique identifier for the migration task. The - // ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be one of the supported task types: - // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, - // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, - // Translation_Snowflake2BQ, Translation_Netezza2BQ, - // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, - // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. - string type = 2; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 5 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID - // is server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 6 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Provides details to errors and issues encountered while - // processing the subtask. Presence of error details does not mean that the - // subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. 
Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto deleted file mode 100644 index 199e2db..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. 
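A minimal sketch of how the MigrationWorkflow, MigrationTask and MigrationSubtask resources defined above surface through the Node.js client, assuming the camelCase field names (`tasks`, `state`, `type`) exposed by the generated client and reusing the example resource name from the proto comments; `printWorkflowState` is an illustrative helper, not a library API.

'use strict';

const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2;

async function printWorkflowState(name) {
  const client = new MigrationServiceClient();
  // Unary calls resolve to an array; the first element is the MigrationWorkflow.
  const [workflow] = await client.getMigrationWorkflow({name});
  console.log(`${workflow.name}: state=${workflow.state}`);
  // `tasks` is a map whose keys are caller-chosen names with no special meaning.
  for (const [key, task] of Object.entries(workflow.tasks || {})) {
    console.log(`  task ${key}: type=${task.type}, state=${task.state}`);
  }
}

printWorkflowState('projects/123/locations/us/workflows/345').catch(console.error);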
- int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto deleted file mode 100644 index e52fead..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. 
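A minimal sketch of walking the ResourceErrorDetail / ErrorDetail / ErrorLocation structure above as it appears on a MigrationSubtask, assuming camelCase field names; `logSubtaskErrors` is an illustrative helper. Per the comments above, a zero line or column means no location information, and `errorCount` may exceed the number of `errorDetails` entries when the list is truncated.

'use strict';

// Logs the per-resource errors attached to a migration subtask.
function logSubtaskErrors(subtask) {
  for (const detail of subtask.resourceErrorDetails || []) {
    const resourceName = detail.resourceInfo ? detail.resourceInfo.resourceName : '(unknown resource)';
    console.log(`${resourceName}: ${detail.errorCount} error(s)`);
    for (const err of detail.errorDetails || []) {
      const info = err.errorInfo || {};
      const where = err.location && err.location.line
        ? ` (line ${err.location.line}, column ${err.location.column})`
        : '';
      console.log(`  ${info.reason || 'UNKNOWN'} [${info.domain || ''}]${where}`);
    }
  }
}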
For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately `+/-10^(+/-300)` and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto deleted file mode 100644 index 3c1a89e..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_entities.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Service to handle EDW migrations. 
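A minimal sketch of printing the TimeSeries / Point / TypedValue metrics above from a MigrationSubtask, assuming camelCase field names and that exactly one of the TypedValue oneof fields is set per point; `logSubtaskMetrics` is an illustrative helper.

'use strict';

// Prints every metric point attached to a migration subtask.
function logSubtaskMetrics(subtask) {
  for (const series of subtask.metrics || []) {
    for (const point of series.points || []) {
      const v = point.value || {};
      // TypedValue is a oneof: bool, int64, double, string or distribution.
      const value =
        v.doubleValue !== undefined ? v.doubleValue :
        v.int64Value !== undefined ? v.int64Value :
        v.boolValue !== undefined ? v.boolValue :
        v.stringValue !== undefined ? v.stringValue :
        v.distributionValue;
      const end = point.interval && point.interval.endTime;
      console.log(`${series.metric} [${series.valueType}] @ ${end ? end.seconds : '?'}: ${JSON.stringify(value)}`);
    }
  }
}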
-service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. - rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. 
- google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. - // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. 
- // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto deleted file mode 100644 index 994140d..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationConfigProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The translation config to capture necessary settings for a translation task -// and subtask. -message TranslationConfigDetails { - // The chosen path where the source for input files will be found. - oneof source_location { - // The Cloud Storage path for a directory of files to translate in a task. - string gcs_source_path = 1; - } - - // The chosen path where the destination for output files will be found. - oneof target_location { - // The Cloud Storage path to write back the corresponding input files to. - string gcs_target_path = 2; - } - - // The dialect of the input files. - Dialect source_dialect = 3; - - // The target dialect for the engine to translate the input to. - Dialect target_dialect = 4; - - // The mapping of full SQL object names from their current state to the - // desired output. - oneof output_name_mapping { - // The mapping of objects to their desired output names in list form. - ObjectNameMappingList name_mapping_list = 5; - } - - // The default source environment values for the translation. - SourceEnv source_env = 6; -} - -// The possible dialect options for translation. -message Dialect { - // The possible dialect options that this message represents. 
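Taken together, the RPCs above form the workflow lifecycle: create a DRAFT workflow, start it (DRAFT to RUNNING), then list or inspect it and its subtasks. A minimal end-to-end sketch with the Node.js client; the project ID, the `us` location and the near-empty workflow body are placeholders, and a real workflow would carry at least one task.

'use strict';

const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2;

async function workflowLifecycle(projectId) {
  const client = new MigrationServiceClient();
  const parent = `projects/${projectId}/locations/us`;

  // CreateMigrationWorkflow: the new workflow starts in DRAFT state.
  const [workflow] = await client.createMigrationWorkflow({
    parent,
    migrationWorkflow: {displayName: 'example-workflow'},
  });

  // StartMigrationWorkflow: transitions the workflow from DRAFT to RUNNING.
  await client.startMigrationWorkflow({name: workflow.name});

  // ListMigrationWorkflows: the *Async helper pages through results automatically.
  for await (const wf of client.listMigrationWorkflowsAsync({parent})) {
    console.log(wf.name, wf.state);
  }
}

workflowLifecycle('my-project').catch(err => {
  console.error(err.message);
  process.exitCode = 1;
});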
- oneof dialect_value { - // The BigQuery dialect - BigQueryDialect bigquery_dialect = 1; - - // The HiveQL dialect - HiveQLDialect hiveql_dialect = 2; - - // The Redshift dialect - RedshiftDialect redshift_dialect = 3; - - // The Teradata dialect - TeradataDialect teradata_dialect = 4; - - // The Oracle dialect - OracleDialect oracle_dialect = 5; - - // The SparkSQL dialect - SparkSQLDialect sparksql_dialect = 6; - - // The Snowflake dialect - SnowflakeDialect snowflake_dialect = 7; - - // The Netezza dialect - NetezzaDialect netezza_dialect = 8; - - // The Azure Synapse dialect - AzureSynapseDialect azure_synapse_dialect = 9; - - // The Vertica dialect - VerticaDialect vertica_dialect = 10; - - // The SQL Server dialect - SQLServerDialect sql_server_dialect = 11; - - // The Postgresql dialect - PostgresqlDialect postgresql_dialect = 12; - - // The Presto dialect - PrestoDialect presto_dialect = 13; - - // The MySQL dialect - MySQLDialect mysql_dialect = 14; - } -} - -// The dialect definition for BigQuery. -message BigQueryDialect {} - -// The dialect definition for HiveQL. -message HiveQLDialect {} - -// The dialect definition for Redshift. -message RedshiftDialect {} - -// The dialect definition for Teradata. -message TeradataDialect { - // The sub-dialect options for Teradata. - enum Mode { - // Unspecified mode. - MODE_UNSPECIFIED = 0; - - // Teradata SQL mode. - SQL = 1; - - // BTEQ mode (which includes SQL). - BTEQ = 2; - } - - // Which Teradata sub-dialect mode the user specifies. - Mode mode = 1; -} - -// The dialect definition for Oracle. -message OracleDialect {} - -// The dialect definition for SparkSQL. -message SparkSQLDialect {} - -// The dialect definition for Snowflake. -message SnowflakeDialect {} - -// The dialect definition for Netezza. -message NetezzaDialect {} - -// The dialect definition for Azure Synapse. -message AzureSynapseDialect {} - -// The dialect definition for Vertica. -message VerticaDialect {} - -// The dialect definition for SQL Server. -message SQLServerDialect {} - -// The dialect definition for Postgresql. -message PostgresqlDialect {} - -// The dialect definition for Presto. -message PrestoDialect {} - -// The dialect definition for MySQL. -message MySQLDialect {} - -// Represents a map of name mappings using a list of key:value proto messages of -// existing name to desired output name. -message ObjectNameMappingList { - // The elements of the object name map. - repeated ObjectNameMapping name_map = 1; -} - -// Represents a key-value pair of NameMappingKey to NameMappingValue to -// represent the mapping of SQL names from the input value to desired output. -message ObjectNameMapping { - // The name of the object in source that is being mapped. - NameMappingKey source = 1; - - // The desired target name of the object that is being mapped. - NameMappingValue target = 2; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the source data warehouse. -message NameMappingKey { - // The type of the object that is being mapped. - enum Type { - // Unspecified name mapping type. - TYPE_UNSPECIFIED = 0; - - // The object being mapped is a database. - DATABASE = 1; - - // The object being mapped is a schema. - SCHEMA = 2; - - // The object being mapped is a relation. - RELATION = 3; - - // The object being mapped is an attribute. - ATTRIBUTE = 4; - - // The object being mapped is a relation alias. - RELATION_ALIAS = 5; - - // The object being mapped is a an attribute alias. 
- ATTRIBUTE_ALIAS = 6; - - // The object being mapped is a function. - FUNCTION = 7; - } - - // The type of object that is being mapped. - Type type = 1; - - // The database name (BigQuery project ID equivalent in the source data - // warehouse). - string database = 2; - - // The schema name (BigQuery dataset equivalent in the source data warehouse). - string schema = 3; - - // The relation name (BigQuery table or view equivalent in the source data - // warehouse). - string relation = 4; - - // The attribute name (BigQuery column equivalent in the source data - // warehouse). - string attribute = 5; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the target data warehouse. -message NameMappingValue { - // The database name (BigQuery project ID equivalent in the target data - // warehouse). - string database = 1; - - // The schema name (BigQuery dataset equivalent in the target data warehouse). - string schema = 2; - - // The relation name (BigQuery table or view equivalent in the target data - // warehouse). - string relation = 3; - - // The attribute name (BigQuery column equivalent in the target data - // warehouse). - string attribute = 4; -} - -// Represents the default source environment values for the translation. -message SourceEnv { - // The default database name to fully qualify SQL objects when their database - // name is missing. - string default_database = 1; - - // The schema search path. When SQL objects are missing schema name, - // translation engine will search through this list to find the value. - repeated string schema_search_path = 2; -} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js deleted file mode 100644 index 8301c3a..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. 
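// One possible shape for `migrationWorkflow` below (illustrative only): it
// combines the MigrationTask and TranslationConfigDetails messages defined in
// translation_config.proto above, assumes the camelCase field mapping of the
// generated client, and uses placeholder Cloud Storage paths and database
// names; `Translation_Teradata2BQ` is one of the task types listed in
// migration_entities.proto.
// const migrationWorkflow = {
//   displayName: 'teradata-to-bq',
//   tasks: {
//     translate: {
//       type: 'Translation_Teradata2BQ',
//       translationConfigDetails: {
//         gcsSourcePath: 'gs://example-bucket/input',
//         gcsTargetPath: 'gs://example-bucket/output',
//         sourceDialect: {teradataDialect: {mode: 'SQL'}},
//         targetDialect: {bigqueryDialect: {}},
//         sourceEnv: {defaultDatabase: 'example_db'},
//       },
//     },
//   },
// };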
- */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js deleted file mode 100644 index 9f0651e..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js deleted file mode 100644 index 25de9e0..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js deleted file mode 100644 index 52ab5cd..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js deleted file mode 100644 index c5c7ed0..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js deleted file mode 100644 index ebd2127..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js deleted file mode 100644 index 7f8257d..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json deleted file mode 100644 index 81ec8bb..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2", - "version": "v2" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 59, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 72, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 78, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "service": { - "shortName": "MigrationService", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts deleted file mode 100644 index 35a8fd9..0000000 --- a/owl-bot-staging/v2/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2 from './v2'; -const MigrationServiceClient = v2.MigrationServiceClient; -type MigrationServiceClient = v2.MigrationServiceClient; -export {v2, MigrationServiceClient}; -export default {v2, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json deleted file mode 100644 index 1b6a33c..0000000 --- a/owl-bot-staging/v2/src/v2/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2", - "libraryPackage": "@google-cloud/bigquery-migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/index.ts 
b/owl-bot-staging/v2/src/v2/index.ts deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2/src/v2/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts deleted file mode 100644 index d99d7a2..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client.ts +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; - -import {Transform} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; - -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2 - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
- if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. - this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. 
- * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v2/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
- * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
- */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. 
- * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. 
The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. - */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. 
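To complement the subtask listing docs above, a short sketch of `listMigrationSubtasksAsync` with a filter. The parent and filter values are the example values quoted in the JSDoc above; the package import mirrors the system-test fixtures in this patch, and the snippet is illustrative only.

    import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

    async function iterateSubtasks(): Promise<void> {
      const client = new MigrationServiceClient();
      const iterable = client.listMigrationSubtasksAsync({
        // Example values taken from the JSDoc above.
        parent: 'projects/123/locations/us/workflows/1234',
        filter: 'migration_task = "ab012"',
      });
      // Each element is a MigrationSubtask; the client pages under the hood.
      for await (const subtask of iterable) {
        console.log(subtask.name);
      }
      await client.close();
    }

    iterateSubtasks().catch(console.error);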
- */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
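The path-template helpers defined above render and parse fully-qualified resource names. A small sketch of the round trip; the rendered format shown in the comment is an assumption inferred from the resource names used elsewhere in this file (for example `projects/123/locations/us/workflows/1234`).

    import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

    const client = new MigrationServiceClient();

    // Render a workflow resource name from its components...
    const workflowName = client.migrationWorkflowPath('my-project', 'us', 'wf-1234');
    // ...expected to look like 'projects/my-project/locations/us/workflows/wf-1234'.

    // ...and parse components back out of a fully-qualified name.
    const project = client.matchProjectFromMigrationWorkflowName(workflowName);
    const workflow = client.matchWorkflowFromMigrationWorkflowName(workflowName);
    console.log(project, workflow);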
- */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json deleted file mode 100644 index 5832815..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client_config.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "ListMigrationSubtasks": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json deleted file mode 100644 index 57df7ab..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" -] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js deleted file mode 100644 index ecc7e4b..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
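The client config above defines per-RPC default timeouts, retryable codes, and backoff parameters. These defaults can be overridden per call through gax `CallOptions`; the sketch below assumes the standard `timeout` and `maxRetries` options, and the workflow name is the example value used throughout this file.

    import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

    async function getWorkflowWithOverrides(): Promise<void> {
      const client = new MigrationServiceClient();
      const [workflow] = await client.getMigrationWorkflow(
        {name: 'projects/123/locations/us/workflows/1234'},
        // Per-call override of the defaults in migration_service_client_config.json.
        {timeout: 120000, maxRetries: 5},
      );
      console.log(workflow.name);
      await client.close();
    }

    getWorkflowWithOverrides().catch(console.error);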
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/bigquery-migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 80fbe2d..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts deleted file mode 100644 index 557a575..0000000 --- a/owl-bot-staging/v2/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import {packNTest} from 'pack-n-play'; -import {readFileSync} from 'fs'; -import {describe, it} from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts deleted file mode 100644 index 061c58c..0000000 --- a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2.MigrationServiceClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new 
migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - 
client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', 
async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - 
assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - 
const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = 
generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, 
expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - 
resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = 
stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - 
.getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - 
assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2alpha/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2alpha/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2alpha/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js deleted file mode 100644 index c3c1e3d..0000000 --- a/owl-bot-staging/v2alpha/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/bigquery-migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2alpha/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2alpha/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2alpha/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2alpha/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json deleted file mode 100644 index 407e1b9..0000000 --- a/owl-bot-staging/v2alpha/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/bigquery-migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.2.0" - }, - "devDependencies": { - "@types/mocha": "^9.1.1", - "@types/node": "^16.11.50", - "@types/sinon": "^10.0.13", - "c8": "^7.12.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.11", - "jsdoc-fresh": "^2.0.0", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.2", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^8.4.0", - "typescript": "^4.7.4", - "webpack": "^4.46.0", - "webpack-cli": "^4.10.0" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto deleted file mode 100644 index 0c6ea13..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "AssessmentTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Assessment task config. -message AssessmentTaskDetails { - // Required. The Cloud Storage path for assessment input files. - string input_path = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The BigQuery dataset for output. - string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. An optional Cloud Storage path to write the query logs (which is - // then used as an input path on the translation task) - string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) - // from which the input data is extracted. - string data_source = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// Details for an assessment task orchestration result. -message AssessmentOrchestrationResultDetails { - // Optional. The version used for the output table schemas. - string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto deleted file mode 100644 index 50d4c75..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. 
- FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Assessment. - AssessmentTaskDetails assessment_task_details = 12; - - // Task configuration for Batch/Offline SQL Translation. - TranslationTaskDetails translation_task_details = 13; - } - - // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be a supported task type. - string type = 2; - - // DEPRECATED! Use one of the task_details below. - // The details of the task. The type URL must be one of the supported task - // details messages and correspond to the Task's type. - google.protobuf.Any details = 3; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; - - // Output only. Additional information about the orchestration. - MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. 
Provides details to errors and issues encountered while processing the - // subtask. Presence of error details does not mean that the subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} - -// Additional information from the orchestrator when it is done with the -// task orchestration. -message MigrationTaskOrchestrationResult { - // Details specific to the task type. - oneof details { - // Details specific to assessment task types. - AssessmentOrchestrationResultDetails assessment_details = 1; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto deleted file mode 100644 index 89dac5e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. 
The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. - int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto deleted file mode 100644 index ce60dd2..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. 
If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately +/-9.2x10^18. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately +/-10^(+/-300) and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto deleted file mode 100644 index 9a184a1..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto +++ /dev/null @@ -1,247 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Service to handle EDW migrations. -service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2alpha/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. 
- rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. 
- // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto deleted file mode 100644 index bf4b27e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Mapping between an input and output file to be translated in a subtask. -message TranslationFileMapping { - // The Cloud Storage path for a file to translation in a subtask. - string input_path = 1; - - // The Cloud Storage path to write back the corresponding input file to. - string output_path = 2; -} - -// The translation task config to capture necessary settings for a translation -// task and subtask. -message TranslationTaskDetails { - // The file encoding types. - enum FileEncoding { - // File encoding setting is not specified. - FILE_ENCODING_UNSPECIFIED = 0; - - // File encoding is UTF_8. - UTF_8 = 1; - - // File encoding is ISO_8859_1. - ISO_8859_1 = 2; - - // File encoding is US_ASCII. - US_ASCII = 3; - - // File encoding is UTF_16. - UTF_16 = 4; - - // File encoding is UTF_16LE. - UTF_16LE = 5; - - // File encoding is UTF_16BE. - UTF_16BE = 6; - } - - // The special token data type. - enum TokenType { - // Token type is not specified. - TOKEN_TYPE_UNSPECIFIED = 0; - - // Token type as string. - STRING = 1; - - // Token type as integer. - INT64 = 2; - - // Token type as numeric. - NUMERIC = 3; - - // Token type as boolean. - BOOL = 4; - - // Token type as float. - FLOAT64 = 5; - - // Token type as date. - DATE = 6; - - // Token type as timestamp. - TIMESTAMP = 7; - } - - // The language specific settings for the translation task. - oneof language_options { - // The Teradata SQL specific settings for the translation task. - TeradataOptions teradata_options = 10; - - // The BTEQ specific settings for the translation task. - BteqOptions bteq_options = 11; - } - - // The Cloud Storage path for translation input files. - string input_path = 1; - - // The Cloud Storage path for translation output files. - string output_path = 2; - - // Cloud Storage files to be processed for translation. - repeated TranslationFileMapping file_paths = 12; - - // The Cloud Storage path to DDL files as table schema to assist semantic - // translation. - string schema_path = 3; - - // The file encoding type. - FileEncoding file_encoding = 4; - - // The settings for SQL identifiers. - IdentifierSettings identifier_settings = 5; - - // The map capturing special tokens to be replaced during translation. The key - // is special token in string. The value is the token data type. This is used - // to translate SQL query template which contains special token as place - // holder. The special token makes a query invalid to parse. This map will be - // applied to annotate those special token with types to let parser understand - // how to parse them into proper structure with type information. - map special_token_map = 6; - - // The filter applied to translation details. - Filter filter = 7; - - // Specifies the exact name of the bigquery table ("dataset.table") to be used - // for surfacing raw translation errors. If the table does not exist, we will - // create it. If it already exists and the schema is the same, we will re-use. - // If the table exists and the schema is different, we will throw an error. 
- string translation_exception_table = 13; -} - -// The filter applied to fields of translation details. -message Filter { - // The list of prefixes used to exclude processing for input files. - repeated string input_file_exclusion_prefixes = 1; -} - -// Settings related to SQL identifiers. -message IdentifierSettings { - // The identifier case type. - enum IdentifierCase { - // The identifier case is not specified. - IDENTIFIER_CASE_UNSPECIFIED = 0; - - // Identifiers' cases will be kept as the original cases. - ORIGINAL = 1; - - // Identifiers will be in upper cases. - UPPER = 2; - - // Identifiers will be in lower cases. - LOWER = 3; - } - - // The SQL identifier rewrite mode. - enum IdentifierRewriteMode { - // SQL Identifier rewrite mode is unspecified. - IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; - - // SQL identifiers won't be rewrite. - NONE = 1; - - // All SQL identifiers will be rewrite. - REWRITE_ALL = 2; - } - - // The setting to control output queries' identifier case. - IdentifierCase output_identifier_case = 1; - - // Specifies the rewrite mode for SQL identifiers. - IdentifierRewriteMode identifier_rewrite_mode = 2; -} - -// Teradata SQL specific translation task related settings. -message TeradataOptions { - -} - -// BTEQ translation task related settings. -message BteqOptions { - // Specifies the project and dataset in BigQuery that will be used for - // external table creation during the translation. - DatasetReference project_dataset = 1; - - // The Cloud Storage location to be used as the default path for files that - // are not otherwise specified in the file replacement map. - string default_path_uri = 2; - - // Maps the local paths that are used in BTEQ scripts (the keys) to the paths - // in Cloud Storage that should be used in their stead in the translation (the - // value). - map file_replacement_map = 3; -} - -// Reference to a BigQuery dataset. -message DatasetReference { - // A unique ID for this dataset, without the project name. The ID - // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). - // The maximum length is 1,024 characters. - string dataset_id = 1; - - // The ID of the project containing this dataset. - string project_id = 2; -} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js deleted file mode 100644 index d8613fe..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. - */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js deleted file mode 100644 index e17c7ee..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js deleted file mode 100644 index a17add1..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js deleted file mode 100644 index 1519196..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js deleted file mode 100644 index 99419dd..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js deleted file mode 100644 index 31c0263..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js deleted file mode 100644 index 85288c6..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json deleted file mode 100644 index c5336c2..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2alpha", - "version": "v2alpha" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 59, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 72, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 78, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "service": { - 
"shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts deleted file mode 100644 index 288e629..0000000 --- a/owl-bot-staging/v2alpha/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2alpha from './v2alpha'; -const MigrationServiceClient = v2alpha.MigrationServiceClient; -type MigrationServiceClient = v2alpha.MigrationServiceClient; -export {v2alpha, MigrationServiceClient}; -export default {v2alpha, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json deleted file mode 100644 index adf8d06..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2alpha", - "libraryPackage": "@google-cloud/bigquery-migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - 
"listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts deleted file mode 100644 index 0fbbe62..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; - -import {Transform} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2alpha/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; - -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2alpha - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
- if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. - this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. 
- * This function will be called automatically when any class method is called for the - first time, but if you need to initialize it before calling an actual method, - feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2alpha.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise<string>; - getProjectId(callback: Callback<string, undefined, undefined>): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback<string, undefined, undefined>): - Promise<string>|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required.
The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. 
- * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. 
The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. 
- * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = 
options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. 
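A sketch of consuming the listMigrationWorkflowsStream method defined above; editorial illustration only, with the parent value taken from the JSDoc example.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

// Illustrative only: process workflows as pages arrive instead of buffering the full result set.
function streamWorkflows(client: MigrationServiceClient): void {
  client.listMigrationWorkflowsStream({parent: 'projects/123/locations/us', pageSize: 50})
    .on('data', (workflow: {name?: string | null}) => console.log(workflow.name))
    .on('error', (err: Error) => console.error(err))
    .on('end', () => console.log('all pages consumed'));
}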
- * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. 
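The async-iterable variant above, listMigrationWorkflowsAsync, is what the JSDoc recommends when iteration may stop early; the sketch below is illustrative and assumes the same client construction as the fixture in this patch.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

// Illustrative only: pages are fetched lazily, so breaking out of the loop avoids extra API calls.
async function firstWorkflowNames(client: MigrationServiceClient, limit = 10): Promise<string[]> {
  const names: string[] = [];
  for await (const workflow of client.listMigrationWorkflowsAsync({parent: 'projects/123/locations/us'})) {
    if (workflow.name) {
      names.push(workflow.name);
    }
    if (names.length >= limit) {
      break;  // stop early; remaining pages are never requested
    }
  }
  return names;
}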
- * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. 
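A sketch of the unary listMigrationSubtasks call documented above, using the filter syntax quoted in the JSDoc; note the auto-pagination caveat, since every page is fetched and merged into one array.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

// Illustrative only: list all subtasks of one task; auto-pagination merges every page into the array.
async function subtasksForTask(client: MigrationServiceClient) {
  const [subtasks] = await client.listMigrationSubtasks({
    parent: 'projects/123/locations/us/workflows/1234',
    filter: 'migration_task = "ab012"',  // filter syntax quoted from the JSDoc above
  });
  return subtasks;
}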
- * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. 
`migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. 
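For completeness, a sketch of the listMigrationSubtasksAsync variant defined above; illustrative only, with the parent value from the JSDoc example.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

// Illustrative only: iterate subtasks on demand; the API is called once per page as iteration proceeds.
async function logSubtaskNames(client: MigrationServiceClient): Promise<void> {
  const iterable = client.listMigrationSubtasksAsync({
    parent: 'projects/123/locations/us/workflows/1234',
    pageSize: 100,
  });
  for await (const subtask of iterable) {
    console.log(subtask.name);
  }
}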
- */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. 
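A short sketch of the path-template helpers defined in this region; illustrative only, using the identifiers from the documentation examples.

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

// Illustrative only: render a subtask resource name and parse components back out of it.
function subtaskNameRoundTrip(client: MigrationServiceClient) {
  const name = client.migrationSubtaskPath('123', 'us', '1234', '543');
  // name === 'projects/123/locations/us/workflows/1234/subtasks/543'
  return {
    workflow: client.matchWorkflowFromMigrationSubtaskName(name),
    subtask: client.matchSubtaskFromMigrationSubtaskName(name),
  };
}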
- */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. - */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json deleted file mode 100644 index 2184b83..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2alpha.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationSubtasks": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json deleted file mode 100644 index 8e91e42..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", - 
"../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" -] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js deleted file mode 100644 index ecc7e4b..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/bigquery-migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 80fbe2d..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts deleted file mode 100644 index 557a575..0000000 --- a/owl-bot-staging/v2alpha/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {packNTest} from 'pack-n-play'; -import {readFileSync} from 'fs'; -import {describe, it} from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts deleted file mode 100644 index 99aac57..0000000 --- a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? 
sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2alpha.MigrationServiceClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - 
projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - 
projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, 
expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, 
reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { 
- 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - 
projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - 
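// Editorial note, not part of the generated test: stubSimpleCall (defined near the top of this file)
// swaps the innerApiCalls entry for a sinon stub that resolves to [expectedResponse], or rejects when
// an error is passed, so the assertions below can verify request routing and response plumbing
// without making any gRPC calls.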
client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: 
protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - 
request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - 
assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - 
credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = 
client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2alpha/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2alpha/webpack.config.js 
b/owl-bot-staging/v2alpha/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2alpha/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/samples/generated/v2/migration_service.create_migration_workflow.js b/samples/generated/v2/migration_service.create_migration_workflow.js index 817b25b..8301c3a 100644 --- a/samples/generated/v2/migration_service.create_migration_workflow.js +++ b/samples/generated/v2/migration_service.create_migration_workflow.js @@ -39,7 +39,7 @@ function main(parent, migrationWorkflow) { // const migrationWorkflow = {} // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2/migration_service.delete_migration_workflow.js b/samples/generated/v2/migration_service.delete_migration_workflow.js index b3907a6..9f0651e 100644 --- a/samples/generated/v2/migration_service.delete_migration_workflow.js +++ b/samples/generated/v2/migration_service.delete_migration_workflow.js @@ -35,7 +35,7 @@ function main(name) { // const name = 'abc123' // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2/migration_service.get_migration_subtask.js b/samples/generated/v2/migration_service.get_migration_subtask.js index 949eb5a..25de9e0 100644 --- a/samples/generated/v2/migration_service.get_migration_subtask.js +++ b/samples/generated/v2/migration_service.get_migration_subtask.js @@ -39,7 +39,7 @@ function main(name) { // const readMask = {} // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; + const {MigrationServiceClient} = 
require('@google-cloud/bigquery-migration').v2; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2/migration_service.get_migration_workflow.js b/samples/generated/v2/migration_service.get_migration_workflow.js index 39e79e3..52ab5cd 100644 --- a/samples/generated/v2/migration_service.get_migration_workflow.js +++ b/samples/generated/v2/migration_service.get_migration_workflow.js @@ -39,7 +39,7 @@ function main(name) { // const readMask = {} // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2/migration_service.list_migration_subtasks.js b/samples/generated/v2/migration_service.list_migration_subtasks.js index 4a63df1..c5c7ed0 100644 --- a/samples/generated/v2/migration_service.list_migration_subtasks.js +++ b/samples/generated/v2/migration_service.list_migration_subtasks.js @@ -57,7 +57,7 @@ function main(parent) { // const filter = 'abc123' // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2/migration_service.list_migration_workflows.js b/samples/generated/v2/migration_service.list_migration_workflows.js index 66dc66f..ebd2127 100644 --- a/samples/generated/v2/migration_service.list_migration_workflows.js +++ b/samples/generated/v2/migration_service.list_migration_workflows.js @@ -51,7 +51,7 @@ function main(parent) { // const pageToken = 'abc123' // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2/migration_service.start_migration_workflow.js b/samples/generated/v2/migration_service.start_migration_workflow.js index 34fb75b..7f8257d 100644 --- a/samples/generated/v2/migration_service.start_migration_workflow.js +++ b/samples/generated/v2/migration_service.start_migration_workflow.js @@ -35,7 +35,7 @@ function main(name) { // const name = 'abc123' // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/samples/generated/v2alpha/migration_service.create_migration_workflow.js index 3c13323..d8613fe 100644 --- a/samples/generated/v2alpha/migration_service.create_migration_workflow.js +++ b/samples/generated/v2alpha/migration_service.create_migration_workflow.js @@ -39,7 +39,7 @@ function main(parent, migrationWorkflow) { // const migrationWorkflow = {} // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git 
a/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/samples/generated/v2alpha/migration_service.delete_migration_workflow.js index 1637924..e17c7ee 100644 --- a/samples/generated/v2alpha/migration_service.delete_migration_workflow.js +++ b/samples/generated/v2alpha/migration_service.delete_migration_workflow.js @@ -35,7 +35,7 @@ function main(name) { // const name = 'abc123' // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/samples/generated/v2alpha/migration_service.get_migration_subtask.js index be42b52..a17add1 100644 --- a/samples/generated/v2alpha/migration_service.get_migration_subtask.js +++ b/samples/generated/v2alpha/migration_service.get_migration_subtask.js @@ -39,7 +39,7 @@ function main(name) { // const readMask = {} // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/samples/generated/v2alpha/migration_service.get_migration_workflow.js index f3ef8c0..1519196 100644 --- a/samples/generated/v2alpha/migration_service.get_migration_workflow.js +++ b/samples/generated/v2alpha/migration_service.get_migration_workflow.js @@ -39,7 +39,7 @@ function main(name) { // const readMask = {} // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/samples/generated/v2alpha/migration_service.list_migration_subtasks.js index 4b1e2a7..99419dd 100644 --- a/samples/generated/v2alpha/migration_service.list_migration_subtasks.js +++ b/samples/generated/v2alpha/migration_service.list_migration_subtasks.js @@ -57,7 +57,7 @@ function main(parent) { // const filter = 'abc123' // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/samples/generated/v2alpha/migration_service.list_migration_workflows.js index 0b71d06..31c0263 100644 --- a/samples/generated/v2alpha/migration_service.list_migration_workflows.js +++ b/samples/generated/v2alpha/migration_service.list_migration_workflows.js @@ -51,7 +51,7 @@ function main(parent) { // const pageToken = 'abc123' // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/samples/generated/v2alpha/migration_service.start_migration_workflow.js 
b/samples/generated/v2alpha/migration_service.start_migration_workflow.js index bcdcbbf..85288c6 100644 --- a/samples/generated/v2alpha/migration_service.start_migration_workflow.js +++ b/samples/generated/v2alpha/migration_service.start_migration_workflow.js @@ -35,7 +35,7 @@ function main(name) { // const name = 'abc123' // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/migration').v2alpha; + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; // Instantiates a client const migrationClient = new MigrationServiceClient(); diff --git a/src/v2/gapic_metadata.json b/src/v2/gapic_metadata.json index e0aa12f..1b6a33c 100644 --- a/src/v2/gapic_metadata.json +++ b/src/v2/gapic_metadata.json @@ -3,7 +3,7 @@ "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", "language": "typescript", "protoPackage": "google.cloud.bigquery.migration.v2", - "libraryPackage": "@google-cloud/migration", + "libraryPackage": "@google-cloud/bigquery-migration", "services": { "MigrationService": { "clients": { diff --git a/src/v2alpha/gapic_metadata.json b/src/v2alpha/gapic_metadata.json index f751ba9..adf8d06 100644 --- a/src/v2alpha/gapic_metadata.json +++ b/src/v2alpha/gapic_metadata.json @@ -3,7 +3,7 @@ "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", "language": "typescript", "protoPackage": "google.cloud.bigquery.migration.v2alpha", - "libraryPackage": "@google-cloud/migration", + "libraryPackage": "@google-cloud/bigquery-migration", "services": { "MigrationService": { "clients": { diff --git a/system-test/fixtures/sample/src/index.js b/system-test/fixtures/sample/src/index.js index 164dcc0..66ee2fb 100644 --- a/system-test/fixtures/sample/src/index.js +++ b/system-test/fixtures/sample/src/index.js @@ -17,7 +17,7 @@ // ** All changes to this file may be overwritten. ** /* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/migration'); +const migration = require('@google-cloud/bigquery-migration'); function main() { const migrationServiceClient = new migration.MigrationServiceClient(); diff --git a/system-test/fixtures/sample/src/index.ts b/system-test/fixtures/sample/src/index.ts index 0afe940..80fbe2d 100644 --- a/system-test/fixtures/sample/src/index.ts +++ b/system-test/fixtures/sample/src/index.ts @@ -16,7 +16,7 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. ** -import {MigrationServiceClient} from '@google-cloud/migration'; +import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; // check that the client class type name can be used function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { From f55017854c86ec34341f6bec8c3dd37e716769de Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Sat, 27 Aug 2022 01:18:31 +0000 Subject: [PATCH 10/13] feat: accept google-gax instance as a parameter Please see the documentation of the client constructor for details. 
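For readers skimming the patch, here is a minimal usage sketch of the feature described above, assuming the generated constructor signature `new MigrationServiceClient(opts, gaxInstance)` documented on the client; the `fallback` option and the `.v2` namespace shown here are illustrative choices, not something this patch prescribes:

    // Hedged sketch (not part of the generated code): supply a pre-loaded
    // google-gax module as the optional second constructor argument, for
    // example to avoid bundling a second copy of gax or to opt into the
    // HTTP fallback transport that gax provides.
    const gax = require('google-gax');
    const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2;
    const migrationClient = new MigrationServiceClient({fallback: 'rest'}, gax);

When the second argument is omitted, the client loads its own copy of google-gax as before, so existing callers are unaffected.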
PiperOrigin-RevId: 470332808 Source-Link: https://github.com/googleapis/googleapis/commit/d4a23675457cd8f0b44080e0594ec72de1291b89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e97a1ac204ead4fe7341f91e72db7c6ac6016341 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTk3YTFhYzIwNGVhZDRmZTczNDFmOTFlNzJkYjdjNmFjNjAxNjM0MSJ9 --- owl-bot-staging/v2/.eslintignore | 7 + owl-bot-staging/v2/.eslintrc.json | 3 + owl-bot-staging/v2/.gitignore | 14 + owl-bot-staging/v2/.jsdoc.js | 55 + owl-bot-staging/v2/.mocharc.js | 33 + owl-bot-staging/v2/.prettierrc.js | 22 + owl-bot-staging/v2/README.md | 1 + owl-bot-staging/v2/linkinator.config.json | 16 + owl-bot-staging/v2/package.json | 64 + .../migration/v2/migration_entities.proto | 233 +++ .../v2/migration_error_details.proto | 62 + .../migration/v2/migration_metrics.proto | 111 ++ .../migration/v2/migration_service.proto | 245 ++++ .../migration/v2/translation_config.proto | 257 ++++ ...ation_service.create_migration_workflow.js | 67 + ...ation_service.delete_migration_workflow.js | 62 + ...migration_service.get_migration_subtask.js | 66 + ...igration_service.get_migration_workflow.js | 66 + ...gration_service.list_migration_subtasks.js | 86 ++ ...ration_service.list_migration_workflows.js | 80 ++ ...ration_service.start_migration_workflow.js | 62 + ...ta.google.cloud.bigquery.migration.v2.json | 335 +++++ owl-bot-staging/v2/src/index.ts | 25 + owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 ++ owl-bot-staging/v2/src/v2/index.ts | 19 + .../v2/src/v2/migration_service_client.ts | 1256 +++++++++++++++++ .../v2/migration_service_client_config.json | 71 + .../src/v2/migration_service_proto_list.json | 7 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + owl-bot-staging/v2/system-test/install.ts | 49 + .../v2/test/gapic_migration_service_v2.ts | 1256 +++++++++++++++++ owl-bot-staging/v2/tsconfig.json | 19 + owl-bot-staging/v2/webpack.config.js | 64 + owl-bot-staging/v2alpha/.eslintignore | 7 + owl-bot-staging/v2alpha/.eslintrc.json | 3 + owl-bot-staging/v2alpha/.gitignore | 14 + owl-bot-staging/v2alpha/.jsdoc.js | 55 + owl-bot-staging/v2alpha/.mocharc.js | 33 + owl-bot-staging/v2alpha/.prettierrc.js | 22 + owl-bot-staging/v2alpha/README.md | 1 + .../v2alpha/linkinator.config.json | 16 + owl-bot-staging/v2alpha/package.json | 64 + .../migration/v2alpha/assessment_task.proto | 49 + .../v2alpha/migration_entities.proto | 244 ++++ .../v2alpha/migration_error_details.proto | 62 + .../migration/v2alpha/migration_metrics.proto | 111 ++ .../migration/v2alpha/migration_service.proto | 247 ++++ .../migration/v2alpha/translation_task.proto | 207 +++ ...ation_service.create_migration_workflow.js | 67 + ...ation_service.delete_migration_workflow.js | 62 + ...migration_service.get_migration_subtask.js | 66 + ...igration_service.get_migration_workflow.js | 66 + ...gration_service.list_migration_subtasks.js | 86 ++ ...ration_service.list_migration_workflows.js | 80 ++ ...ration_service.start_migration_workflow.js | 62 + ...ogle.cloud.bigquery.migration.v2alpha.json | 335 +++++ owl-bot-staging/v2alpha/src/index.ts | 25 + .../v2alpha/src/v2alpha/gapic_metadata.json | 101 ++ owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 + .../src/v2alpha/migration_service_client.ts | 1256 +++++++++++++++++ .../migration_service_client_config.json | 73 + .../v2alpha/migration_service_proto_list.json | 8 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + 
.../v2alpha/system-test/install.ts | 49 + .../test/gapic_migration_service_v2alpha.ts | 1256 +++++++++++++++++ owl-bot-staging/v2alpha/tsconfig.json | 19 + owl-bot-staging/v2alpha/webpack.config.js | 64 + 69 files changed, 9761 insertions(+) create mode 100644 owl-bot-staging/v2/.eslintignore create mode 100644 owl-bot-staging/v2/.eslintrc.json create mode 100644 owl-bot-staging/v2/.gitignore create mode 100644 owl-bot-staging/v2/.jsdoc.js create mode 100644 owl-bot-staging/v2/.mocharc.js create mode 100644 owl-bot-staging/v2/.prettierrc.js create mode 100644 owl-bot-staging/v2/README.md create mode 100644 owl-bot-staging/v2/linkinator.config.json create mode 100644 owl-bot-staging/v2/package.json create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json create mode 100644 owl-bot-staging/v2/src/index.ts create mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/src/v2/index.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json create mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2/system-test/install.ts create mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts create mode 100644 owl-bot-staging/v2/tsconfig.json create mode 100644 owl-bot-staging/v2/webpack.config.js create mode 100644 owl-bot-staging/v2alpha/.eslintignore create mode 100644 owl-bot-staging/v2alpha/.eslintrc.json create mode 100644 owl-bot-staging/v2alpha/.gitignore create mode 100644 owl-bot-staging/v2alpha/.jsdoc.js create mode 100644 owl-bot-staging/v2alpha/.mocharc.js create mode 100644 owl-bot-staging/v2alpha/.prettierrc.js create mode 100644 owl-bot-staging/v2alpha/README.md create mode 100644 owl-bot-staging/v2alpha/linkinator.config.json create mode 100644 owl-bot-staging/v2alpha/package.json create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto create mode 100644 
owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json create mode 100644 owl-bot-staging/v2alpha/src/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2alpha/system-test/install.ts create mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts create mode 100644 owl-bot-staging/v2alpha/tsconfig.json create mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js new file mode 100644 index 0000000..c3c1e3d --- /dev/null +++ b/owl-bot-staging/v2/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); 
+// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/bigquery-migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json new file mode 100644 index 0000000..8a9a77d --- /dev/null +++ b/owl-bot-staging/v2/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/bigquery-migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.3.0" + }, + "devDependencies": { + "@types/mocha": "^9.1.1", + "@types/node": "^16.11.56", + "@types/sinon": "^10.0.13", + "c8": "^7.12.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.11", + "jsdoc-fresh": "^2.0.1", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.2", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^8.4.0", + "typescript": "^4.8.2", + "webpack": "^4.46.0", + "webpack-cli": "^4.10.0" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto new file mode 100644 index 0000000..7d77bae --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto @@ -0,0 +1,233 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2/translation_config.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. + // The ID is server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map tasks = 2; + + // Output only. That status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. 
its subtasks are ready for execution. + RUNNING = 3; + + // Tha task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. + FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Batch SQL Translation. + TranslationConfigDetails translation_config_details = 14; + } + + // Output only. Immutable. The unique identifier for the migration task. The + // ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be one of the supported task types: + // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, + // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, + // Translation_Snowflake2BQ, Translation_Netezza2BQ, + // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, + // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. + string type = 2; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID + // is server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. 
+ google.rpc.ErrorInfo processing_error = 6 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Provides details to errors and issues encountered while + // processing the subtask. Presence of error details does not mean that the + // subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number or resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto new file mode 100644 index 0000000..199e2db --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that where encountered when +// processing a subtask. +message ErrorDetail { + // Optional. The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. 
+ google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto new file mode 100644 index 0000000..e52fead --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. 
+ repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. + oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately `+/-10^(+/-300)` and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto new file mode 100644 index 0000000..3c1a89e --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto @@ -0,0 +1,245 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_entities.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. + rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. + rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. 
+ // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. + // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. 
+message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto new file mode 100644 index 0000000..994140d --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto @@ -0,0 +1,257 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationConfigProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The translation config to capture necessary settings for a translation task +// and subtask. +message TranslationConfigDetails { + // The chosen path where the source for input files will be found. + oneof source_location { + // The Cloud Storage path for a directory of files to translate in a task. + string gcs_source_path = 1; + } + + // The chosen path where the destination for output files will be found. 
+ oneof target_location { + // The Cloud Storage path to write back the corresponding input files to. + string gcs_target_path = 2; + } + + // The dialect of the input files. + Dialect source_dialect = 3; + + // The target dialect for the engine to translate the input to. + Dialect target_dialect = 4; + + // The mapping of full SQL object names from their current state to the + // desired output. + oneof output_name_mapping { + // The mapping of objects to their desired output names in list form. + ObjectNameMappingList name_mapping_list = 5; + } + + // The default source environment values for the translation. + SourceEnv source_env = 6; +} + +// The possible dialect options for translation. +message Dialect { + // The possible dialect options that this message represents. + oneof dialect_value { + // The BigQuery dialect + BigQueryDialect bigquery_dialect = 1; + + // The HiveQL dialect + HiveQLDialect hiveql_dialect = 2; + + // The Redshift dialect + RedshiftDialect redshift_dialect = 3; + + // The Teradata dialect + TeradataDialect teradata_dialect = 4; + + // The Oracle dialect + OracleDialect oracle_dialect = 5; + + // The SparkSQL dialect + SparkSQLDialect sparksql_dialect = 6; + + // The Snowflake dialect + SnowflakeDialect snowflake_dialect = 7; + + // The Netezza dialect + NetezzaDialect netezza_dialect = 8; + + // The Azure Synapse dialect + AzureSynapseDialect azure_synapse_dialect = 9; + + // The Vertica dialect + VerticaDialect vertica_dialect = 10; + + // The SQL Server dialect + SQLServerDialect sql_server_dialect = 11; + + // The Postgresql dialect + PostgresqlDialect postgresql_dialect = 12; + + // The Presto dialect + PrestoDialect presto_dialect = 13; + + // The MySQL dialect + MySQLDialect mysql_dialect = 14; + } +} + +// The dialect definition for BigQuery. +message BigQueryDialect {} + +// The dialect definition for HiveQL. +message HiveQLDialect {} + +// The dialect definition for Redshift. +message RedshiftDialect {} + +// The dialect definition for Teradata. +message TeradataDialect { + // The sub-dialect options for Teradata. + enum Mode { + // Unspecified mode. + MODE_UNSPECIFIED = 0; + + // Teradata SQL mode. + SQL = 1; + + // BTEQ mode (which includes SQL). + BTEQ = 2; + } + + // Which Teradata sub-dialect mode the user specifies. + Mode mode = 1; +} + +// The dialect definition for Oracle. +message OracleDialect {} + +// The dialect definition for SparkSQL. +message SparkSQLDialect {} + +// The dialect definition for Snowflake. +message SnowflakeDialect {} + +// The dialect definition for Netezza. +message NetezzaDialect {} + +// The dialect definition for Azure Synapse. +message AzureSynapseDialect {} + +// The dialect definition for Vertica. +message VerticaDialect {} + +// The dialect definition for SQL Server. +message SQLServerDialect {} + +// The dialect definition for Postgresql. +message PostgresqlDialect {} + +// The dialect definition for Presto. +message PrestoDialect {} + +// The dialect definition for MySQL. +message MySQLDialect {} + +// Represents a map of name mappings using a list of key:value proto messages of +// existing name to desired output name. +message ObjectNameMappingList { + // The elements of the object name map. + repeated ObjectNameMapping name_map = 1; +} + +// Represents a key-value pair of NameMappingKey to NameMappingValue to +// represent the mapping of SQL names from the input value to desired output. +message ObjectNameMapping { + // The name of the object in source that is being mapped. 
+ NameMappingKey source = 1; + + // The desired target name of the object that is being mapped. + NameMappingValue target = 2; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the source data warehouse. +message NameMappingKey { + // The type of the object that is being mapped. + enum Type { + // Unspecified name mapping type. + TYPE_UNSPECIFIED = 0; + + // The object being mapped is a database. + DATABASE = 1; + + // The object being mapped is a schema. + SCHEMA = 2; + + // The object being mapped is a relation. + RELATION = 3; + + // The object being mapped is an attribute. + ATTRIBUTE = 4; + + // The object being mapped is a relation alias. + RELATION_ALIAS = 5; + + // The object being mapped is a an attribute alias. + ATTRIBUTE_ALIAS = 6; + + // The object being mapped is a function. + FUNCTION = 7; + } + + // The type of object that is being mapped. + Type type = 1; + + // The database name (BigQuery project ID equivalent in the source data + // warehouse). + string database = 2; + + // The schema name (BigQuery dataset equivalent in the source data warehouse). + string schema = 3; + + // The relation name (BigQuery table or view equivalent in the source data + // warehouse). + string relation = 4; + + // The attribute name (BigQuery column equivalent in the source data + // warehouse). + string attribute = 5; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the target data warehouse. +message NameMappingValue { + // The database name (BigQuery project ID equivalent in the target data + // warehouse). + string database = 1; + + // The schema name (BigQuery dataset equivalent in the target data warehouse). + string schema = 2; + + // The relation name (BigQuery table or view equivalent in the target data + // warehouse). + string relation = 3; + + // The attribute name (BigQuery column equivalent in the target data + // warehouse). + string attribute = 4; +} + +// Represents the default source environment values for the translation. +message SourceEnv { + // The default database name to fully qualify SQL objects when their database + // name is missing. + string default_database = 1; + + // The schema search path. When SQL objects are missing schema name, + // translation engine will search through this list to find the value. + repeated string schema_search_path = 2; +} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..8301c3a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js @@ -0,0 +1,67 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. + */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..9f0651e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..25de9e0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..52ab5cd --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..c5c7ed0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js @@ -0,0 +1,86 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..ebd2127 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js @@ -0,0 +1,80 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..7f8257d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json new file mode 100644 index 0000000..81ec8bb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2", + "version": "v2" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 72, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 78, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "service": { + "shortName": "MigrationService", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts new file mode 100644 index 0000000..35a8fd9 --- /dev/null +++ b/owl-bot-staging/v2/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2 from './v2'; +const MigrationServiceClient = v2.MigrationServiceClient; +type MigrationServiceClient = v2.MigrationServiceClient; +export {v2, MigrationServiceClient}; +export default {v2, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json new file mode 100644 index 0000000..1b6a33c --- /dev/null +++ b/owl-bot-staging/v2/src/v2/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2", + "libraryPackage": "@google-cloud/bigquery-migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/index.ts 
b/owl-bot-staging/v2/src/v2/index.ts new file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts new file mode 100644 index 0000000..cfb3f86 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import type * as gax from 'google-gax'; +import type {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; +import {Transform} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
+ * @class + * @memberof v2 + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new MigrationServiceClient({fallback: 'rest'}, gax); + * ``` + */ + constructor(opts?: ClientOptions, gaxInstance?: typeof gax | typeof gax.fallback) { + // Ensure that options include all the required fields. 
+ const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. 
+ this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. 
+ */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise<string>; + getProjectId(callback: Callback<string, undefined, undefined>): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback<string, undefined, undefined>): + Promise<string>|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples.
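+ * @example <caption>Illustrative sketch only, not the generated sample referenced below: `my-project`, `us`, and the minimal workflow payload are placeholder values, and Application Default Credentials are assumed.</caption>
+ * ```
+ * const {MigrationServiceClient} = require('@google-cloud/bigquery-migration');
+ * const client = new MigrationServiceClient();
+ * async function createWorkflow() {
+ *   const [workflow] = await client.createMigrationWorkflow({
+ *     parent: 'projects/my-project/locations/us',
+ *     migrationWorkflow: {displayName: 'example-workflow'},
+ *   });
+ *   console.log(`Created ${workflow.name}`);
+ * }
+ * createWorkflow();
+ * ```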
+ * @example include:samples/generated/v2/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
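+ * @example <caption>Illustrative sketch (placeholder resource name; `client` constructed as in the createMigrationWorkflow example, inside an async function).</caption>
+ * ```
+ * const [workflow] = await client.getMigrationWorkflow({
+ *   name: 'projects/my-project/locations/us/workflows/1234',
+ * });
+ * console.log(workflow.state);
+ * ```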
+ * @example include:samples/generated/v2/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
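+ * @example <caption>Illustrative sketch (placeholder resource name; `client` and async context as above).</caption>
+ * ```
+ * await client.deleteMigrationWorkflow({
+ *   name: 'projects/my-project/locations/us/workflows/1234',
+ * });
+ * ```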
+ * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
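+ * @example <caption>Illustrative sketch (placeholder resource name; `client` and async context as above); the returned promise resolves with an Empty response.</caption>
+ * ```
+ * await client.startMigrationWorkflow({
+ *   name: 'projects/my-project/locations/us/workflows/1234',
+ * });
+ * ```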
+ * @example include:samples/generated/v2/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
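+ * @example <caption>Illustrative sketch (placeholder resource name; `client` and async context as above); `readMask` is optional and omitted here.</caption>
+ * ```
+ * const [subtask] = await client.getMigrationSubtask({
+ *   name: 'projects/my-project/locations/us/workflows/1234/subtasks/543',
+ * });
+ * console.log(subtask.state);
+ * ```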
+ * @example include:samples/generated/v2/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. 
+ * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
+ * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. 
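+ * @example <caption>Illustrative sketch of the async-iteration variant recommended above (placeholder parent; `client` and async context as in the createMigrationWorkflow example).</caption>
+ * ```
+ * for await (const subtask of client.listMigrationSubtasksAsync({
+ *   parent: 'projects/my-project/locations/us/workflows/1234',
+ * })) {
+ *   console.log(subtask.name);
+ * }
+ * ```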
+ */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. 
+ * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
+ * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. + */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. 
+ * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
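+ * @example <caption>Illustrative sketch: release the channel once no further calls are needed (`client` and async context as above).</caption>
+ * ```
+ * await client.close();
+ * ```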
+ */
+ close(): Promise<void> {
+ if (this.migrationServiceStub && !this._terminated) {
+ return this.migrationServiceStub.then(stub => {
+ this._terminated = true;
+ stub.close();
+ });
+ }
+ return Promise.resolve();
+ }
+}
diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json
new file mode 100644
index 0000000..5832815
--- /dev/null
+++ b/owl-bot-staging/v2/src/v2/migration_service_client_config.json
@@ -0,0 +1,71 @@
+{
+ "interfaces": {
+ "google.cloud.bigquery.migration.v2.MigrationService": {
+ "retry_codes": {
+ "non_idempotent": [],
+ "idempotent": [
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE"
+ ],
+ "unavailable": [
+ "UNAVAILABLE"
+ ]
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 60000,
+ "rpc_timeout_multiplier": 1,
+ "max_rpc_timeout_millis": 60000,
+ "total_timeout_millis": 600000
+ },
+ "ce5b960a6ed052e690863808e4f0deff3dc7d49f": {
+ "initial_retry_delay_millis": 1000,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 10000,
+ "initial_rpc_timeout_millis": 60000,
+ "rpc_timeout_multiplier": 1,
+ "max_rpc_timeout_millis": 60000,
+ "total_timeout_millis": 600000
+ }
+ },
+ "methods": {
+ "CreateMigrationWorkflow": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default"
+ },
+ "GetMigrationWorkflow": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "unavailable",
+ "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f"
+ },
+ "ListMigrationWorkflows": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "unavailable",
+ "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f"
+ },
+ "DeleteMigrationWorkflow": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default"
+ },
+ "StartMigrationWorkflow": {
+ "timeout_millis": 60000,
+ "retry_codes_name": "unavailable",
+ "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f"
+ },
+ "GetMigrationSubtask": {
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default"
+ },
+ "ListMigrationSubtasks": {
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default"
+ }
+ }
+ }
+ }
+}
diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json
new file mode 100644
index 0000000..57df7ab
--- /dev/null
+++ b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json
@@ -0,0 +1,7 @@
+[
+ "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto",
+ "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto",
+ "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto",
+ "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto",
+ "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto"
+]
diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js
new file mode 100644
index 0000000..ecc7e4b
--- /dev/null
+++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js
@@ -0,0 +1,27 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/bigquery-migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..80fbe2d --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts new file mode 100644 index 0000000..557a575 --- /dev/null +++ b/owl-bot-staging/v2/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
**
+
+import {packNTest} from 'pack-n-play';
+import {readFileSync} from 'fs';
+import {describe, it} from 'mocha';
+
+describe('📦 pack-n-play test', () => {
+
+ it('TypeScript code', async function() {
+ this.timeout(300000);
+ const options = {
+ packageDir: process.cwd(),
+ sample: {
+ description: 'TypeScript user can use the type definitions',
+ ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString()
+ }
+ };
+ await packNTest(options);
+ });
+
+ it('JavaScript code', async function() {
+ this.timeout(300000);
+ const options = {
+ packageDir: process.cwd(),
+ sample: {
+ description: 'JavaScript user can use the library',
+ ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString()
+ }
+ };
+ await packNTest(options);
+ });
+
+});
diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts
new file mode 100644
index 0000000..061c58c
--- /dev/null
+++ b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts
@@ -0,0 +1,1254 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ** This file is automatically generated by gapic-generator-typescript. **
+// ** https://github.com/googleapis/gapic-generator-typescript **
+// ** All changes to this file may be overwritten. **
+
+import * as protos from '../protos/protos';
+import * as assert from 'assert';
+import * as sinon from 'sinon';
+import {SinonStub} from 'sinon';
+import {describe, it} from 'mocha';
+import * as migrationserviceModule from '../src';
+
+import {PassThrough} from 'stream';
+
+import {protobuf} from 'google-gax';
+
+function generateSampleMessage<T extends object>(instance: T) {
+ const filledObject = (instance.constructor as typeof protobuf.Message)
+ .toObject(instance as protobuf.Message<T>, {defaults: true});
+ return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T;
+}
+
+function stubSimpleCall<ResponseType>(response?: ResponseType, error?: Error) {
+ return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]);
+}
+
+function stubSimpleCallWithCallback<ResponseType>(response?: ResponseType, error?: Error) {
+ return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response);
+}
+
+function stubPageStreamingCall<ResponseType>(responses?: ResponseType[], error?: Error) {
+ const pagingStub = sinon.stub();
+ if (responses) {
+ for (let i = 0; i < responses.length; ++i) {
+ pagingStub.onCall(i).callsArgWith(2, null, responses[i]);
+ }
+ }
+ const transformStub = error ?
sinon.stub().callsArgWith(2, error) : pagingStub;
+ const mockStream = new PassThrough({
+ objectMode: true,
+ transform: transformStub,
+ });
+ // trigger as many responses as needed
+ if (responses) {
+ for (let i = 0; i < responses.length; ++i) {
+ setImmediate(() => { mockStream.write({}); });
+ }
+ setImmediate(() => { mockStream.end(); });
+ } else {
+ setImmediate(() => { mockStream.write({}); });
+ setImmediate(() => { mockStream.end(); });
+ }
+ return sinon.stub().returns(mockStream);
+}
+
+function stubAsyncIterationCall<ResponseType>(responses?: ResponseType[], error?: Error) {
+ let counter = 0;
+ const asyncIterable = {
+ [Symbol.asyncIterator]() {
+ return {
+ async next() {
+ if (error) {
+ return Promise.reject(error);
+ }
+ if (counter >= responses!.length) {
+ return Promise.resolve({done: true, value: undefined});
+ }
+ return Promise.resolve({done: false, value: responses![counter++]});
+ }
+ };
+ }
+ };
+ return sinon.stub().returns(asyncIterable);
+}
+
+describe('v2.MigrationServiceClient', () => {
+ describe('Common methods', () => {
+ it('has servicePath', () => {
+ const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath;
+ assert(servicePath);
+ });
+
+ it('has apiEndpoint', () => {
+ const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint;
+ assert(apiEndpoint);
+ });
+
+ it('has port', () => {
+ const port = migrationserviceModule.v2.MigrationServiceClient.port;
+ assert(port);
+ assert(typeof port === 'number');
+ });
+
+ it('should create a client with no option', () => {
+ const client = new migrationserviceModule.v2.MigrationServiceClient();
+ assert(client);
+ });
+
+ it('should create a client with gRPC fallback', () => {
+ const client = new migrationserviceModule.v2.MigrationServiceClient({
+ fallback: true,
+ });
+ assert(client);
+ });
+
+ it('has initialize method and supports deferred initialization', async () => {
+ const client = new migrationserviceModule.v2.MigrationServiceClient({
+ credentials: {client_email: 'bogus', private_key: 'bogus'},
+ projectId: 'bogus',
+ });
+ assert.strictEqual(client.migrationServiceStub, undefined);
+ await client.initialize();
+ assert(client.migrationServiceStub);
+ });
+
+ it('has close method for the initialized client', done => {
+ const client = new migrationserviceModule.v2.MigrationServiceClient({
+ credentials: {client_email: 'bogus', private_key: 'bogus'},
+ projectId: 'bogus',
+ });
+ client.initialize();
+ assert(client.migrationServiceStub);
+ client.close().then(() => {
+ done();
+ });
+ });
+
+ it('has close method for the non-initialized client', done => {
+ const client = new migrationserviceModule.v2.MigrationServiceClient({
+ credentials: {client_email: 'bogus', private_key: 'bogus'},
+ projectId: 'bogus',
+ });
+ assert.strictEqual(client.migrationServiceStub, undefined);
+ client.close().then(() => {
+ done();
+ });
+ });
+
+ it('has getProjectId method', async () => {
+ const fakeProjectId = 'fake-project-id';
+ const client = new migrationserviceModule.v2.MigrationServiceClient({
+ credentials: {client_email: 'bogus', private_key: 'bogus'},
+ projectId: 'bogus',
+ });
+ client.auth.getProjectId = sinon.stub().resolves(fakeProjectId);
+ const result = await client.getProjectId();
+ assert.strictEqual(result, fakeProjectId);
+ assert((client.auth.getProjectId as SinonStub).calledWithExactly());
+ });
+
+ it('has getProjectId method with callback', async () => {
+ const fakeProjectId = 'fake-project-id';
+ const client = new
migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + 
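+ // stubSimpleCall(undefined, error) yields a sinon stub that rejects with the
+ // given error, so the client surface is expected to propagate it to assert.rejects below.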
client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', 
async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + 
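+ // The callback should have been invoked with the stubbed Empty response,
+ // which the wrapping Promise resolves for the assertion below.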
assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + 
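+ // The stub invokes the callback (its third argument) with the expected response,
+ // so the callback-style call is wrapped in a Promise to await the result.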
const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = 
generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, 
expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + 
resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = 
stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + 
.getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + 
assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
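+//
+// The bundling setup below: ts-loader compiles the TypeScript entry point, and
+// the null-loader rules stub out Node-only dependencies (@grpc/grpc-js, grpc,
+// retry-request, the proxy agents, and gtoken) so the browser bundle does not
+// try to include them.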
+ +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2alpha/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js new file mode 100644 index 0000000..c3c1e3d --- /dev/null +++ b/owl-bot-staging/v2alpha/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/bigquery-migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2alpha/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2alpha/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2alpha/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2alpha/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json new file mode 100644 index 0000000..8a9a77d --- /dev/null +++ b/owl-bot-staging/v2alpha/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/bigquery-migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.3.0" + }, + "devDependencies": { + "@types/mocha": "^9.1.1", + "@types/node": "^16.11.56", + "@types/sinon": "^10.0.13", + "c8": "^7.12.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.11", + "jsdoc-fresh": "^2.0.1", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.2", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^8.4.0", + "typescript": "^4.8.2", + "webpack": "^4.46.0", + "webpack-cli": "^4.10.0" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto new file mode 100644 index 0000000..0c6ea13 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto @@ -0,0 +1,49 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "AssessmentTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Assessment task config. +message AssessmentTaskDetails { + // Required. The Cloud Storage path for assessment input files. + string input_path = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The BigQuery dataset for output. + string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. An optional Cloud Storage path to write the query logs (which is + // then used as an input path on the translation task) + string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) + // from which the input data is extracted. + string data_source = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// Details for an assessment task orchestration result. +message AssessmentOrchestrationResultDetails { + // Optional. The version used for the output table schemas. + string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto new file mode 100644 index 0000000..50d4c75 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto @@ -0,0 +1,244 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map tasks = 2; + + // Output only. That status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // Tha task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. 
+ FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Assessment. + AssessmentTaskDetails assessment_task_details = 12; + + // Task configuration for Batch/Offline SQL Translation. + TranslationTaskDetails translation_task_details = 13; + } + + // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be a supported task type. + string type = 2; + + // DEPRECATED! Use one of the task_details below. + // The details of the task. The type URL must be one of the supported task + // details messages and correspond to the Task's type. + google.protobuf.Any details = 3; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; + + // Output only. Additional information about the orchestration. + MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. 
Provides details for errors and issues encountered while processing the + // subtask. Presence of error details does not mean that the subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number of resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} + +// Additional information from the orchestrator when it is done with the +// task orchestration. +message MigrationTaskOrchestrationResult { + // Details specific to the task type. + oneof details { + // Details specific to assessment task types. + AssessmentOrchestrationResultDetails assessment_details = 1; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto new file mode 100644 index 0000000..89dac5e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that were encountered when +// processing a subtask. +message ErrorDetail { + // Optional.
The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto new file mode 100644 index 0000000..ce60dd2 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. 
If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. + oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately +/-9.2x10^18. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately +/-10^(+/-300) and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto new file mode 100644 index 0000000..9a184a1 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto @@ -0,0 +1,247 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2alpha/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. + rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. 
+ rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. 
+ // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto new file mode 100644 index 0000000..bf4b27e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto @@ -0,0 +1,207 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Mapping between an input and output file to be translated in a subtask. +message TranslationFileMapping { + // The Cloud Storage path for a file to translation in a subtask. + string input_path = 1; + + // The Cloud Storage path to write back the corresponding input file to. + string output_path = 2; +} + +// The translation task config to capture necessary settings for a translation +// task and subtask. +message TranslationTaskDetails { + // The file encoding types. + enum FileEncoding { + // File encoding setting is not specified. + FILE_ENCODING_UNSPECIFIED = 0; + + // File encoding is UTF_8. + UTF_8 = 1; + + // File encoding is ISO_8859_1. + ISO_8859_1 = 2; + + // File encoding is US_ASCII. + US_ASCII = 3; + + // File encoding is UTF_16. + UTF_16 = 4; + + // File encoding is UTF_16LE. + UTF_16LE = 5; + + // File encoding is UTF_16BE. + UTF_16BE = 6; + } + + // The special token data type. + enum TokenType { + // Token type is not specified. + TOKEN_TYPE_UNSPECIFIED = 0; + + // Token type as string. + STRING = 1; + + // Token type as integer. + INT64 = 2; + + // Token type as numeric. + NUMERIC = 3; + + // Token type as boolean. + BOOL = 4; + + // Token type as float. + FLOAT64 = 5; + + // Token type as date. + DATE = 6; + + // Token type as timestamp. + TIMESTAMP = 7; + } + + // The language specific settings for the translation task. + oneof language_options { + // The Teradata SQL specific settings for the translation task. + TeradataOptions teradata_options = 10; + + // The BTEQ specific settings for the translation task. + BteqOptions bteq_options = 11; + } + + // The Cloud Storage path for translation input files. + string input_path = 1; + + // The Cloud Storage path for translation output files. + string output_path = 2; + + // Cloud Storage files to be processed for translation. + repeated TranslationFileMapping file_paths = 12; + + // The Cloud Storage path to DDL files as table schema to assist semantic + // translation. + string schema_path = 3; + + // The file encoding type. + FileEncoding file_encoding = 4; + + // The settings for SQL identifiers. + IdentifierSettings identifier_settings = 5; + + // The map capturing special tokens to be replaced during translation. The key + // is special token in string. The value is the token data type. This is used + // to translate SQL query template which contains special token as place + // holder. The special token makes a query invalid to parse. This map will be + // applied to annotate those special token with types to let parser understand + // how to parse them into proper structure with type information. + map special_token_map = 6; + + // The filter applied to translation details. + Filter filter = 7; + + // Specifies the exact name of the bigquery table ("dataset.table") to be used + // for surfacing raw translation errors. If the table does not exist, we will + // create it. If it already exists and the schema is the same, we will re-use. + // If the table exists and the schema is different, we will throw an error. 
+ string translation_exception_table = 13; +} + +// The filter applied to fields of translation details. +message Filter { + // The list of prefixes used to exclude processing for input files. + repeated string input_file_exclusion_prefixes = 1; +} + +// Settings related to SQL identifiers. +message IdentifierSettings { + // The identifier case type. + enum IdentifierCase { + // The identifier case is not specified. + IDENTIFIER_CASE_UNSPECIFIED = 0; + + // Identifiers will be kept in their original case. + ORIGINAL = 1; + + // Identifiers will be in upper case. + UPPER = 2; + + // Identifiers will be in lower case. + LOWER = 3; + } + + // The SQL identifier rewrite mode. + enum IdentifierRewriteMode { + // SQL identifier rewrite mode is unspecified. + IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; + + // SQL identifiers won't be rewritten. + NONE = 1; + + // All SQL identifiers will be rewritten. + REWRITE_ALL = 2; + } + + // The setting to control output queries' identifier case. + IdentifierCase output_identifier_case = 1; + + // Specifies the rewrite mode for SQL identifiers. + IdentifierRewriteMode identifier_rewrite_mode = 2; +} + +// Teradata SQL specific translation task related settings. +message TeradataOptions { + +} + +// BTEQ translation task related settings. +message BteqOptions { + // Specifies the project and dataset in BigQuery that will be used for + // external table creation during the translation. + DatasetReference project_dataset = 1; + + // The Cloud Storage location to be used as the default path for files that + // are not otherwise specified in the file replacement map. + string default_path_uri = 2; + + // Maps the local paths that are used in BTEQ scripts (the keys) to the paths + // in Cloud Storage that should be used in their stead in the translation (the + // value). + map<string, string> file_replacement_map = 3; +} + +// Reference to a BigQuery dataset. +message DatasetReference { + // A unique ID for this dataset, without the project name. The ID + // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). + // The maximum length is 1,024 characters. + string dataset_id = 1; + + // The ID of the project containing this dataset. + string project_id = 2; +} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..d8613fe --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js @@ -0,0 +1,67 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten.
** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. + */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..e17c7ee --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..a17add1 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..1519196 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..99419dd --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js @@ -0,0 +1,86 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..31c0263 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js @@ -0,0 +1,80 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..85288c6 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json new file mode 100644 index 0000000..c5336c2 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2alpha", + "version": "v2alpha" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 72, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 78, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "service": { + 
"shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts new file mode 100644 index 0000000..288e629 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2alpha from './v2alpha'; +const MigrationServiceClient = v2alpha.MigrationServiceClient; +type MigrationServiceClient = v2alpha.MigrationServiceClient; +export {v2alpha, MigrationServiceClient}; +export default {v2alpha, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json new file mode 100644 index 0000000..adf8d06 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2alpha", + "libraryPackage": "@google-cloud/bigquery-migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + 
"listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts new file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts new file mode 100644 index 0000000..d5105f9 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import type * as gax from 'google-gax'; +import type {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; +import {Transform} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2alpha/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
+ * @class + * @memberof v2alpha + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new MigrationServiceClient({fallback: 'rest'}, gax); + * ``` + */ + constructor(opts?: ClientOptions, gaxInstance?: typeof gax | typeof gax.fallback) { + // Ensure that options include all the required fields. 
+ const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. 
+ this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2alpha.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. 
+ */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise<string>; + getProjectId(callback: Callback<string, undefined, undefined>): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback<string, undefined, undefined>): + Promise<string>|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples.
+ * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to 
`method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. 
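+ *
+ * A minimal call sketch, assuming `client` is an initialized MigrationServiceClient and the
+ * parent value is a placeholder; auto-pagination merges every page into the returned array:
+ *
+ *   const [subtasks] = await client.listMigrationSubtasks({
+ *     parent: 'projects/123/locations/us/workflows/1234',
+ *     filter: 'migration_task = "ab012"',
+ *   });
+ *   console.log(subtasks.length);
+ *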
+ * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number.
+ * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
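+ *
+ * A minimal iteration sketch, assuming `client` is an initialized MigrationServiceClient and
+ * the parent value is a placeholder; breaking out of the loop stops further page fetches:
+ *
+ *   for await (const subtask of client.listMigrationSubtasksAsync({
+ *     parent: 'projects/123/locations/us/workflows/1234',
+ *   })) {
+ *     console.log(subtask.name);
+ *   }
+ *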
+ * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. + */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. 
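+ *
+ * A round-trip sketch with placeholder IDs, assuming `client` is an initialized
+ * MigrationServiceClient:
+ *
+ *   const name = client.migrationSubtaskPath('my-project', 'us', '1234', '5678');
+ *   const project = client.matchProjectFromMigrationSubtaskName(name); // 'my-project'
+ *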
+ */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
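+ *
+ * A minimal shutdown sketch, assuming `client` is an initialized MigrationServiceClient:
+ *
+ *   await client.close();
+ *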
+ */ + close(): Promise { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json new file mode 100644 index 0000000..2184b83 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json @@ -0,0 +1,73 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2alpha.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationSubtasks": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json new file mode 100644 index 0000000..8e91e42 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json @@ -0,0 +1,8 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" +] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..ecc7e4b --- /dev/null +++ 
b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/bigquery-migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..80fbe2d --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts new file mode 100644 index 0000000..557a575 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {packNTest} from 'pack-n-play'; +import {readFileSync} from 'fs'; +import {describe, it} from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts new file mode 100644 index 0000000..99aac57 --- /dev/null +++ b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2alpha.MigrationServiceClient', () => { + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + 
const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + 
}; + const expectedError = new Error('expected'); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 
'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + 
.getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: 
Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, 
+ }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes 
listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] 
= []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], 
request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes 
listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + 
client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, 
"locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2alpha/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2alpha/webpack.config.js b/owl-bot-staging/v2alpha/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2alpha/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; From 347ed2b8b1ed20a8eb6a33e8fbd64ac99326ad89 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Sat, 27 Aug 2022 01:20:16 +0000 Subject: [PATCH 11/13] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- owl-bot-staging/v2/.eslintignore | 7 - owl-bot-staging/v2/.eslintrc.json | 3 - owl-bot-staging/v2/.gitignore | 14 - owl-bot-staging/v2/.jsdoc.js | 55 - owl-bot-staging/v2/.mocharc.js | 33 - owl-bot-staging/v2/.prettierrc.js | 22 - owl-bot-staging/v2/README.md | 1 - owl-bot-staging/v2/linkinator.config.json | 16 - owl-bot-staging/v2/package.json | 64 - .../migration/v2/migration_entities.proto | 233 --- .../v2/migration_error_details.proto | 62 - .../migration/v2/migration_metrics.proto | 111 -- .../migration/v2/migration_service.proto | 245 ---- .../migration/v2/translation_config.proto | 257 ---- ...ation_service.create_migration_workflow.js | 67 - ...ation_service.delete_migration_workflow.js | 62 - ...migration_service.get_migration_subtask.js | 66 - ...igration_service.get_migration_workflow.js | 66 - ...gration_service.list_migration_subtasks.js | 86 -- ...ration_service.list_migration_workflows.js | 80 -- ...ration_service.start_migration_workflow.js | 62 - ...ta.google.cloud.bigquery.migration.v2.json | 335 ----- owl-bot-staging/v2/src/index.ts | 25 - owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 -- owl-bot-staging/v2/src/v2/index.ts | 19 - .../v2/src/v2/migration_service_client.ts | 1256 ----------------- .../v2/migration_service_client_config.json | 71 - .../src/v2/migration_service_proto_list.json | 7 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - owl-bot-staging/v2/system-test/install.ts | 49 - .../v2/test/gapic_migration_service_v2.ts | 1256 ----------------- owl-bot-staging/v2/tsconfig.json | 19 - owl-bot-staging/v2/webpack.config.js | 64 - owl-bot-staging/v2alpha/.eslintignore | 7 - owl-bot-staging/v2alpha/.eslintrc.json | 3 - owl-bot-staging/v2alpha/.gitignore | 14 - owl-bot-staging/v2alpha/.jsdoc.js | 55 - 
owl-bot-staging/v2alpha/.mocharc.js | 33 - owl-bot-staging/v2alpha/.prettierrc.js | 22 - owl-bot-staging/v2alpha/README.md | 1 - .../v2alpha/linkinator.config.json | 16 - owl-bot-staging/v2alpha/package.json | 64 - .../migration/v2alpha/assessment_task.proto | 49 - .../v2alpha/migration_entities.proto | 244 ---- .../v2alpha/migration_error_details.proto | 62 - .../migration/v2alpha/migration_metrics.proto | 111 -- .../migration/v2alpha/migration_service.proto | 247 ---- .../migration/v2alpha/translation_task.proto | 207 --- ...ation_service.create_migration_workflow.js | 67 - ...ation_service.delete_migration_workflow.js | 62 - ...migration_service.get_migration_subtask.js | 66 - ...igration_service.get_migration_workflow.js | 66 - ...gration_service.list_migration_subtasks.js | 86 -- ...ration_service.list_migration_workflows.js | 80 -- ...ration_service.start_migration_workflow.js | 62 - ...ogle.cloud.bigquery.migration.v2alpha.json | 335 ----- owl-bot-staging/v2alpha/src/index.ts | 25 - .../v2alpha/src/v2alpha/gapic_metadata.json | 101 -- owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 - .../src/v2alpha/migration_service_client.ts | 1256 ----------------- .../migration_service_client_config.json | 73 - .../v2alpha/migration_service_proto_list.json | 8 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - .../v2alpha/system-test/install.ts | 49 - .../test/gapic_migration_service_v2alpha.ts | 1256 ----------------- owl-bot-staging/v2alpha/tsconfig.json | 19 - owl-bot-staging/v2alpha/webpack.config.js | 64 - src/v2/migration_service_client.ts | 49 +- src/v2alpha/migration_service_client.ts | 49 +- 71 files changed, 62 insertions(+), 9797 deletions(-) delete mode 100644 owl-bot-staging/v2/.eslintignore delete mode 100644 owl-bot-staging/v2/.eslintrc.json delete mode 100644 owl-bot-staging/v2/.gitignore delete mode 100644 owl-bot-staging/v2/.jsdoc.js delete mode 100644 owl-bot-staging/v2/.mocharc.js delete mode 100644 owl-bot-staging/v2/.prettierrc.js delete mode 100644 owl-bot-staging/v2/README.md delete mode 100644 owl-bot-staging/v2/linkinator.config.json delete mode 100644 owl-bot-staging/v2/package.json delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js delete mode 100644 
owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json delete mode 100644 owl-bot-staging/v2/src/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/src/v2/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2/system-test/install.ts delete mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts delete mode 100644 owl-bot-staging/v2/tsconfig.json delete mode 100644 owl-bot-staging/v2/webpack.config.js delete mode 100644 owl-bot-staging/v2alpha/.eslintignore delete mode 100644 owl-bot-staging/v2alpha/.eslintrc.json delete mode 100644 owl-bot-staging/v2alpha/.gitignore delete mode 100644 owl-bot-staging/v2alpha/.jsdoc.js delete mode 100644 owl-bot-staging/v2alpha/.mocharc.js delete mode 100644 owl-bot-staging/v2alpha/.prettierrc.js delete mode 100644 owl-bot-staging/v2alpha/README.md delete mode 100644 owl-bot-staging/v2alpha/linkinator.config.json delete mode 100644 owl-bot-staging/v2alpha/package.json delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json delete mode 100644 owl-bot-staging/v2alpha/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json delete mode 100644 
owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/system-test/install.ts delete mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts delete mode 100644 owl-bot-staging/v2alpha/tsconfig.json delete mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js deleted file mode 100644 index c3c1e3d..0000000 --- a/owl-bot-staging/v2/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/bigquery-migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json deleted file mode 100644 index 8a9a77d..0000000 --- a/owl-bot-staging/v2/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/bigquery-migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.3.0" - }, - "devDependencies": { - "@types/mocha": "^9.1.1", - "@types/node": "^16.11.56", - "@types/sinon": "^10.0.13", - "c8": "^7.12.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.11", - "jsdoc-fresh": "^2.0.1", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.2", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^8.4.0", - "typescript": "^4.8.2", - "webpack": "^4.46.0", - "webpack-cli": "^4.10.0" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto deleted file mode 100644 index 7d77bae..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2/translation_config.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. - // The ID is server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. - FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Batch SQL Translation. 
- TranslationConfigDetails translation_config_details = 14; - } - - // Output only. Immutable. The unique identifier for the migration task. The - // ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be one of the supported task types: - // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, - // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, - // Translation_Snowflake2BQ, Translation_Netezza2BQ, - // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, - // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. - string type = 2; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 5 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID - // is server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 6 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Provides details to errors and issues encountered while - // processing the subtask. Presence of error details does not mean that the - // subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. 
Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto deleted file mode 100644 index 199e2db..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. 
- int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto deleted file mode 100644 index e52fead..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. 
For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately `+/-10^(+/-300)` and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto deleted file mode 100644 index 3c1a89e..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_entities.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Service to handle EDW migrations. 
-service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. - rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. 
- google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. - // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. 
- // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto deleted file mode 100644 index 994140d..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationConfigProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The translation config to capture necessary settings for a translation task -// and subtask. -message TranslationConfigDetails { - // The chosen path where the source for input files will be found. - oneof source_location { - // The Cloud Storage path for a directory of files to translate in a task. - string gcs_source_path = 1; - } - - // The chosen path where the destination for output files will be found. - oneof target_location { - // The Cloud Storage path to write back the corresponding input files to. - string gcs_target_path = 2; - } - - // The dialect of the input files. - Dialect source_dialect = 3; - - // The target dialect for the engine to translate the input to. - Dialect target_dialect = 4; - - // The mapping of full SQL object names from their current state to the - // desired output. - oneof output_name_mapping { - // The mapping of objects to their desired output names in list form. - ObjectNameMappingList name_mapping_list = 5; - } - - // The default source environment values for the translation. - SourceEnv source_env = 6; -} - -// The possible dialect options for translation. -message Dialect { - // The possible dialect options that this message represents. 
- oneof dialect_value { - // The BigQuery dialect - BigQueryDialect bigquery_dialect = 1; - - // The HiveQL dialect - HiveQLDialect hiveql_dialect = 2; - - // The Redshift dialect - RedshiftDialect redshift_dialect = 3; - - // The Teradata dialect - TeradataDialect teradata_dialect = 4; - - // The Oracle dialect - OracleDialect oracle_dialect = 5; - - // The SparkSQL dialect - SparkSQLDialect sparksql_dialect = 6; - - // The Snowflake dialect - SnowflakeDialect snowflake_dialect = 7; - - // The Netezza dialect - NetezzaDialect netezza_dialect = 8; - - // The Azure Synapse dialect - AzureSynapseDialect azure_synapse_dialect = 9; - - // The Vertica dialect - VerticaDialect vertica_dialect = 10; - - // The SQL Server dialect - SQLServerDialect sql_server_dialect = 11; - - // The Postgresql dialect - PostgresqlDialect postgresql_dialect = 12; - - // The Presto dialect - PrestoDialect presto_dialect = 13; - - // The MySQL dialect - MySQLDialect mysql_dialect = 14; - } -} - -// The dialect definition for BigQuery. -message BigQueryDialect {} - -// The dialect definition for HiveQL. -message HiveQLDialect {} - -// The dialect definition for Redshift. -message RedshiftDialect {} - -// The dialect definition for Teradata. -message TeradataDialect { - // The sub-dialect options for Teradata. - enum Mode { - // Unspecified mode. - MODE_UNSPECIFIED = 0; - - // Teradata SQL mode. - SQL = 1; - - // BTEQ mode (which includes SQL). - BTEQ = 2; - } - - // Which Teradata sub-dialect mode the user specifies. - Mode mode = 1; -} - -// The dialect definition for Oracle. -message OracleDialect {} - -// The dialect definition for SparkSQL. -message SparkSQLDialect {} - -// The dialect definition for Snowflake. -message SnowflakeDialect {} - -// The dialect definition for Netezza. -message NetezzaDialect {} - -// The dialect definition for Azure Synapse. -message AzureSynapseDialect {} - -// The dialect definition for Vertica. -message VerticaDialect {} - -// The dialect definition for SQL Server. -message SQLServerDialect {} - -// The dialect definition for Postgresql. -message PostgresqlDialect {} - -// The dialect definition for Presto. -message PrestoDialect {} - -// The dialect definition for MySQL. -message MySQLDialect {} - -// Represents a map of name mappings using a list of key:value proto messages of -// existing name to desired output name. -message ObjectNameMappingList { - // The elements of the object name map. - repeated ObjectNameMapping name_map = 1; -} - -// Represents a key-value pair of NameMappingKey to NameMappingValue to -// represent the mapping of SQL names from the input value to desired output. -message ObjectNameMapping { - // The name of the object in source that is being mapped. - NameMappingKey source = 1; - - // The desired target name of the object that is being mapped. - NameMappingValue target = 2; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the source data warehouse. -message NameMappingKey { - // The type of the object that is being mapped. - enum Type { - // Unspecified name mapping type. - TYPE_UNSPECIFIED = 0; - - // The object being mapped is a database. - DATABASE = 1; - - // The object being mapped is a schema. - SCHEMA = 2; - - // The object being mapped is a relation. - RELATION = 3; - - // The object being mapped is an attribute. - ATTRIBUTE = 4; - - // The object being mapped is a relation alias. - RELATION_ALIAS = 5; - - // The object being mapped is a an attribute alias. 
- ATTRIBUTE_ALIAS = 6; - - // The object being mapped is a function. - FUNCTION = 7; - } - - // The type of object that is being mapped. - Type type = 1; - - // The database name (BigQuery project ID equivalent in the source data - // warehouse). - string database = 2; - - // The schema name (BigQuery dataset equivalent in the source data warehouse). - string schema = 3; - - // The relation name (BigQuery table or view equivalent in the source data - // warehouse). - string relation = 4; - - // The attribute name (BigQuery column equivalent in the source data - // warehouse). - string attribute = 5; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the target data warehouse. -message NameMappingValue { - // The database name (BigQuery project ID equivalent in the target data - // warehouse). - string database = 1; - - // The schema name (BigQuery dataset equivalent in the target data warehouse). - string schema = 2; - - // The relation name (BigQuery table or view equivalent in the target data - // warehouse). - string relation = 3; - - // The attribute name (BigQuery column equivalent in the target data - // warehouse). - string attribute = 4; -} - -// Represents the default source environment values for the translation. -message SourceEnv { - // The default database name to fully qualify SQL objects when their database - // name is missing. - string default_database = 1; - - // The schema search path. When SQL objects are missing schema name, - // translation engine will search through this list to find the value. - repeated string schema_search_path = 2; -} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js deleted file mode 100644 index 8301c3a..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. 
- */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js deleted file mode 100644 index 9f0651e..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js deleted file mode 100644 index 25de9e0..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js deleted file mode 100644 index 52ab5cd..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js deleted file mode 100644 index c5c7ed0..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js deleted file mode 100644 index ebd2127..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js deleted file mode 100644 index 7f8257d..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json deleted file mode 100644 index 81ec8bb..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2", - "version": "v2" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 59, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 72, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 78, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "service": { - "shortName": "MigrationService", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts deleted file mode 100644 index 35a8fd9..0000000 --- a/owl-bot-staging/v2/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2 from './v2'; -const MigrationServiceClient = v2.MigrationServiceClient; -type MigrationServiceClient = v2.MigrationServiceClient; -export {v2, MigrationServiceClient}; -export default {v2, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json deleted file mode 100644 index 1b6a33c..0000000 --- a/owl-bot-staging/v2/src/v2/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2", - "libraryPackage": "@google-cloud/bigquery-migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/index.ts 
b/owl-bot-staging/v2/src/v2/index.ts deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2/src/v2/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts deleted file mode 100644 index cfb3f86..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import type * as gax from 'google-gax'; -import type {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; -import {Transform} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2 - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you - * need to avoid loading the default gRPC version and want to use the fallback - * HTTP implementation. Load only fallback version and pass it to the constructor: - * ``` - * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new MigrationServiceClient({fallback: 'rest'}, gax); - * ``` - */ - constructor(opts?: ClientOptions, gaxInstance?: typeof gax | typeof gax.fallback) { - // Ensure that options include all the required fields. 
- const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Load google-gax module synchronously if needed - if (!gaxInstance) { - gaxInstance = require('google-gax') as typeof gax; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. 
- this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = this._gaxModule.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. 
- */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v2/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
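The overloads above accept either promise or callback style. A minimal promise-style sketch for `createMigrationWorkflow`, assuming the published `@google-cloud/bigquery-migration` package; the project ID, location, and workflow fields are placeholders.

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function createWorkflow(): Promise<void> {
  const client = new MigrationServiceClient();
  const [workflow] = await client.createMigrationWorkflow({
    // Placeholder project and location.
    parent: 'projects/my-project/locations/us',
    migrationWorkflow: {
      displayName: 'example-workflow', // illustrative field only
    },
  });
  console.log(`Created workflow: ${workflow.name}`);
}

createWorkflow().catch(console.error);
```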
- * @example include:samples/generated/v2/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
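A usage sketch for `getMigrationWorkflow`, showing both the promise form and the equivalent callback form of the overloads above. The resource name is a placeholder and the `readMask` field paths are illustrative.

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

const client = new MigrationServiceClient();
const name = 'projects/123/locations/us/workflows/1234'; // placeholder

async function getWorkflow(): Promise<void> {
  // Promise form: the first tuple element is the MigrationWorkflow.
  const [workflow] = await client.getMigrationWorkflow({
    name,
    readMask: {paths: ['name', 'display_name', 'state']}, // illustrative field paths
  });
  console.log(workflow.state);
}

// Callback form, matching the overloads above.
client.getMigrationWorkflow({name}, (err, workflow) => {
  if (err) throw err;
  console.log(workflow?.name);
});

getWorkflow().catch(console.error);
```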
- * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
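A corresponding sketch for `deleteMigrationWorkflow`. The first element of the resolved tuple is `google.protobuf.Empty`, so there is no payload to inspect; the workflow name is a placeholder.

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function deleteWorkflow(): Promise<void> {
  const client = new MigrationServiceClient();
  await client.deleteMigrationWorkflow({
    name: 'projects/123/locations/us/workflows/1234', // placeholder
  });
  console.log('Workflow deleted');
}

deleteWorkflow().catch(console.error);
```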
- * @example include:samples/generated/v2/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
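A sketch for `startMigrationWorkflow`. Per the comment above, starting is a no-op for a workflow already in RUNNING, and the call signals an error for any state other than DRAFT or RUNNING, so the failure path is worth handling explicitly; the resource name is a placeholder.

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function startWorkflow(): Promise<void> {
  const client = new MigrationServiceClient();
  const name = 'projects/123/locations/us/workflows/1234'; // placeholder
  try {
    await client.startMigrationWorkflow({name});
    console.log('Workflow is RUNNING (or was already running)');
  } catch (err) {
    // Signaled when the workflow is in a state other than DRAFT or RUNNING.
    console.error('Could not start workflow:', err);
  }
}

startWorkflow().catch(console.error);
```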
- * @example include:samples/generated/v2/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
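A sketch for `getMigrationSubtask`, using the subtask resource-name shape documented above; `readMask` is optional and the field paths shown are illustrative.

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function getSubtask(): Promise<void> {
  const client = new MigrationServiceClient();
  const [subtask] = await client.getMigrationSubtask({
    // Placeholder subtask resource name.
    name: 'projects/123/locations/us/workflows/1234/subtasks/543',
    readMask: {paths: ['name', 'state']}, // illustrative field paths
  });
  console.log(subtask.name, subtask.state);
}

getSubtask().catch(console.error);
```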
- * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. 
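The promise form above auto-paginates: it keeps calling the API until all pages are merged into a single array, which can be costly on quota for large result sets. A sketch with a placeholder parent:

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listWorkflows(): Promise<void> {
  const client = new MigrationServiceClient();
  // Auto-pagination merges every page into one array before resolving.
  const [workflows] = await client.listMigrationWorkflows({
    parent: 'projects/123/locations/us', // placeholder
    pageSize: 50,
  });
  for (const workflow of workflows) {
    console.log(workflow.name, workflow.state);
  }
}

listWorkflows().catch(console.error);
```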
- * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
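The stream variant above emits one `MigrationWorkflow` per 'data' event while paging through results under the hood; a sketch with a placeholder parent:

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

const client = new MigrationServiceClient();
client
  .listMigrationWorkflowsStream({parent: 'projects/123/locations/us'}) // placeholder parent
  .on('data', workflow => console.log(workflow.name))
  .on('error', console.error)
  .on('end', () => console.log('All workflows listed'));
```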
- * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples.
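The async-iterable variant above is the recommended way to stop paging early: breaking out of the loop stops further page requests. A sketch with a placeholder parent and an illustrative stopping condition:

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function iterateWorkflows(): Promise<void> {
  const client = new MigrationServiceClient();
  let seen = 0;
  for await (const workflow of client.listMigrationWorkflowsAsync({
    parent: 'projects/123/locations/us', // placeholder
    pageSize: 25,
  })) {
    console.log(workflow.name);
    // Illustrative early exit: no further pages are requested after the break.
    if (++seen >= 100) break;
  }
}

iterateWorkflows().catch(console.error);
```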
- */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. 
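A sketch for `listMigrationSubtasks`, using the `filter` syntax described above to narrow results to a single task ID; the workflow name and task ID are placeholders.

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listSubtasks(): Promise<void> {
  const client = new MigrationServiceClient();
  const [subtasks] = await client.listMigrationSubtasks({
    parent: 'projects/123/locations/us/workflows/1234', // placeholder
    filter: 'migration_task = "ab012"', // placeholder task ID, per the filter docs above
    pageSize: 100,
  });
  console.log(`Matched ${subtasks.length} subtasks`);
}

listSubtasks().catch(console.error);
```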
- * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
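Because the returned `Transform` above is an object-mode Node stream, it can also be consumed with `for await` directly (relying on Node's async iteration over readable streams), which is sometimes more convenient than wiring 'data' handlers; a sketch with a placeholder parent:

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function streamSubtasks(): Promise<void> {
  const client = new MigrationServiceClient();
  const stream = client.listMigrationSubtasksStream({
    parent: 'projects/123/locations/us/workflows/1234', // placeholder
  });
  // Object-mode streams are async iterable in modern Node versions.
  for await (const subtask of stream) {
    console.log(subtask.name);
  }
}

streamSubtasks().catch(console.error);
```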
- * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. - */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource.
- * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
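The path-template helpers above compose and parse the resource names used by every RPC in this client. A short sketch; the project, location, and IDs are placeholders.

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

const client = new MigrationServiceClient();

// Compose fully-qualified resource names.
const workflowName = client.migrationWorkflowPath('my-project', 'us', '1234');
// => 'projects/my-project/locations/us/workflows/1234'
const subtaskName = client.migrationSubtaskPath('my-project', 'us', '1234', '543');

// Parse individual segments back out of a resource name.
console.log(client.matchWorkflowFromMigrationWorkflowName(workflowName)); // '1234'
console.log(client.matchSubtaskFromMigrationSubtaskName(subtaskName)); // '543'
console.log(client.matchLocationFromLocationName(client.locationPath('my-project', 'us'))); // 'us'
```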
- */ - close(): Promise<void> { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json deleted file mode 100644 index 5832815..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client_config.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "ListMigrationSubtasks": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json deleted file mode 100644 index 57df7ab..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" -] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js deleted file mode 100644 index ecc7e4b..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License.
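The `migration_service_client_config.json` removed above is the source of the per-method retry and timeout defaults that the constructor merges via `constructSettings` (note the `opts.clientConfig || {}` argument earlier in this diff). A hedged sketch of overriding one method's settings at construction time; the exact merge behavior is handled by `google-gax`, and the values shown are illustrative.

```ts
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

// Illustrative override: shorten GetMigrationWorkflow's timeout while reusing the
// retry codes and retry params names from the generated config shown above.
const client = new MigrationServiceClient({
  clientConfig: {
    interfaces: {
      'google.cloud.bigquery.migration.v2.MigrationService': {
        methods: {
          GetMigrationWorkflow: {
            timeout_millis: 30000,
            retry_codes_name: 'unavailable',
            retry_params_name: 'ce5b960a6ed052e690863808e4f0deff3dc7d49f',
          },
        },
      },
    },
  },
});

client
  .getMigrationWorkflow({name: 'projects/123/locations/us/workflows/1234'}) // placeholder
  .then(([workflow]) => console.log(workflow.name))
  .catch(console.error);
```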
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/bigquery-migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 80fbe2d..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts deleted file mode 100644 index 557a575..0000000 --- a/owl-bot-staging/v2/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import {packNTest} from 'pack-n-play'; -import {readFileSync} from 'fs'; -import {describe, it} from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts deleted file mode 100644 index 061c58c..0000000 --- a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage<T>(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall<ResponseType>(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback<ResponseType>(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall<ResponseType>(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ?
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2.MigrationServiceClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new 
migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - 
client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', 
async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - 
assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - 
const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = 
generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, 
expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - 
resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = 
stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - 
.getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - 
assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
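For reference, the deleted v2 test suite above exercises the same client surface that the generated samples call. A minimal, illustrative sketch of that usage follows; it assumes the published @google-cloud/bigquery-migration package and uses a placeholder project/location, so treat it as a sketch rather than code taken from this patch:

import {v2} from '@google-cloud/bigquery-migration';

async function listWorkflows(): Promise<void> {
  // The client class covered by the tests above; default credentials are
  // picked up from the environment.
  const client = new v2.MigrationServiceClient();
  // Placeholder resource name; substitute a real project and location.
  const parent = 'projects/my-project/locations/us';
  // listMigrationWorkflowsAsync yields MigrationWorkflow messages one at a
  // time, matching the async-iteration tests deleted above.
  for await (const workflow of client.listMigrationWorkflowsAsync({parent})) {
    console.log(workflow.name);
  }
}

listWorkflows().catch(console.error);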
- -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2alpha/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2alpha/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2alpha/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js deleted file mode 100644 index c3c1e3d..0000000 --- a/owl-bot-staging/v2alpha/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/bigquery-migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2alpha/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2alpha/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2alpha/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2alpha/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json deleted file mode 100644 index 8a9a77d..0000000 --- a/owl-bot-staging/v2alpha/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/bigquery-migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.3.0" - }, - "devDependencies": { - "@types/mocha": "^9.1.1", - "@types/node": "^16.11.56", - "@types/sinon": "^10.0.13", - "c8": "^7.12.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.11", - "jsdoc-fresh": "^2.0.1", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.2", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^8.4.0", - "typescript": "^4.8.2", - "webpack": "^4.46.0", - "webpack-cli": "^4.10.0" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto deleted file mode 100644 index 0c6ea13..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "AssessmentTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Assessment task config. -message AssessmentTaskDetails { - // Required. The Cloud Storage path for assessment input files. - string input_path = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The BigQuery dataset for output. - string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. An optional Cloud Storage path to write the query logs (which is - // then used as an input path on the translation task) - string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) - // from which the input data is extracted. - string data_source = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// Details for an assessment task orchestration result. -message AssessmentOrchestrationResultDetails { - // Optional. The version used for the output table schemas. - string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto deleted file mode 100644 index 50d4c75..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. 
- FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Assessment. - AssessmentTaskDetails assessment_task_details = 12; - - // Task configuration for Batch/Offline SQL Translation. - TranslationTaskDetails translation_task_details = 13; - } - - // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be a supported task type. - string type = 2; - - // DEPRECATED! Use one of the task_details below. - // The details of the task. The type URL must be one of the supported task - // details messages and correspond to the Task's type. - google.protobuf.Any details = 3; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; - - // Output only. Additional information about the orchestration. - MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. 
Provides details to errors and issues encountered while processing the - // subtask. Presence of error details does not mean that the subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number of resources with errors. Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} - -// Additional information from the orchestrator when it is done with the -// task orchestration. -message MigrationTaskOrchestrationResult { - // Details specific to the task type. - oneof details { - // Details specific to assessment task types. - AssessmentOrchestrationResultDetails assessment_details = 1; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto deleted file mode 100644 index 89dac5e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that were encountered when -// processing a subtask. -message ErrorDetail { - // Optional.
The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. - int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto deleted file mode 100644 index ce60dd2..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. 
If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately +/-9.2x10^18. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately +/-10^(+/-300) and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto deleted file mode 100644 index 9a184a1..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto +++ /dev/null @@ -1,247 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Service to handle EDW migrations. -service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2alpha/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. 
- rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. 
- // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto deleted file mode 100644 index bf4b27e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Mapping between an input and output file to be translated in a subtask. -message TranslationFileMapping { - // The Cloud Storage path for a file to translation in a subtask. - string input_path = 1; - - // The Cloud Storage path to write back the corresponding input file to. - string output_path = 2; -} - -// The translation task config to capture necessary settings for a translation -// task and subtask. -message TranslationTaskDetails { - // The file encoding types. - enum FileEncoding { - // File encoding setting is not specified. - FILE_ENCODING_UNSPECIFIED = 0; - - // File encoding is UTF_8. - UTF_8 = 1; - - // File encoding is ISO_8859_1. - ISO_8859_1 = 2; - - // File encoding is US_ASCII. - US_ASCII = 3; - - // File encoding is UTF_16. - UTF_16 = 4; - - // File encoding is UTF_16LE. - UTF_16LE = 5; - - // File encoding is UTF_16BE. - UTF_16BE = 6; - } - - // The special token data type. - enum TokenType { - // Token type is not specified. - TOKEN_TYPE_UNSPECIFIED = 0; - - // Token type as string. - STRING = 1; - - // Token type as integer. - INT64 = 2; - - // Token type as numeric. - NUMERIC = 3; - - // Token type as boolean. - BOOL = 4; - - // Token type as float. - FLOAT64 = 5; - - // Token type as date. - DATE = 6; - - // Token type as timestamp. - TIMESTAMP = 7; - } - - // The language specific settings for the translation task. - oneof language_options { - // The Teradata SQL specific settings for the translation task. - TeradataOptions teradata_options = 10; - - // The BTEQ specific settings for the translation task. - BteqOptions bteq_options = 11; - } - - // The Cloud Storage path for translation input files. - string input_path = 1; - - // The Cloud Storage path for translation output files. - string output_path = 2; - - // Cloud Storage files to be processed for translation. - repeated TranslationFileMapping file_paths = 12; - - // The Cloud Storage path to DDL files as table schema to assist semantic - // translation. - string schema_path = 3; - - // The file encoding type. - FileEncoding file_encoding = 4; - - // The settings for SQL identifiers. - IdentifierSettings identifier_settings = 5; - - // The map capturing special tokens to be replaced during translation. The key - // is special token in string. The value is the token data type. This is used - // to translate SQL query template which contains special token as place - // holder. The special token makes a query invalid to parse. This map will be - // applied to annotate those special token with types to let parser understand - // how to parse them into proper structure with type information. - map special_token_map = 6; - - // The filter applied to translation details. - Filter filter = 7; - - // Specifies the exact name of the bigquery table ("dataset.table") to be used - // for surfacing raw translation errors. If the table does not exist, we will - // create it. If it already exists and the schema is the same, we will re-use. - // If the table exists and the schema is different, we will throw an error. 
- string translation_exception_table = 13; -} - -// The filter applied to fields of translation details. -message Filter { - // The list of prefixes used to exclude processing for input files. - repeated string input_file_exclusion_prefixes = 1; -} - -// Settings related to SQL identifiers. -message IdentifierSettings { - // The identifier case type. - enum IdentifierCase { - // The identifier case is not specified. - IDENTIFIER_CASE_UNSPECIFIED = 0; - - // Identifiers' cases will be kept as the original cases. - ORIGINAL = 1; - - // Identifiers will be in upper cases. - UPPER = 2; - - // Identifiers will be in lower cases. - LOWER = 3; - } - - // The SQL identifier rewrite mode. - enum IdentifierRewriteMode { - // SQL Identifier rewrite mode is unspecified. - IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; - - // SQL identifiers won't be rewritten. - NONE = 1; - - // All SQL identifiers will be rewritten. - REWRITE_ALL = 2; - } - - // The setting to control output queries' identifier case. - IdentifierCase output_identifier_case = 1; - - // Specifies the rewrite mode for SQL identifiers. - IdentifierRewriteMode identifier_rewrite_mode = 2; -} - -// Teradata SQL specific translation task related settings. -message TeradataOptions { - -} - -// BTEQ translation task related settings. -message BteqOptions { - // Specifies the project and dataset in BigQuery that will be used for - // external table creation during the translation. - DatasetReference project_dataset = 1; - - // The Cloud Storage location to be used as the default path for files that - // are not otherwise specified in the file replacement map. - string default_path_uri = 2; - - // Maps the local paths that are used in BTEQ scripts (the keys) to the paths - // in Cloud Storage that should be used in their stead in the translation (the - // value). - map<string, string> file_replacement_map = 3; -} - -// Reference to a BigQuery dataset. -message DatasetReference { - // A unique ID for this dataset, without the project name. The ID - // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). - // The maximum length is 1,024 characters. - string dataset_id = 1; - - // The ID of the project containing this dataset. - string project_id = 2; -} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js deleted file mode 100644 index d8613fe..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten.
** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. - */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js deleted file mode 100644 index e17c7ee..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js deleted file mode 100644 index a17add1..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js deleted file mode 100644 index 1519196..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js deleted file mode 100644 index 99419dd..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js deleted file mode 100644 index 31c0263..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js deleted file mode 100644 index 85288c6..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json deleted file mode 100644 index c5336c2..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2alpha", - "version": "v2alpha" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 59, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 72, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 78, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "service": { - 
"shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts deleted file mode 100644 index 288e629..0000000 --- a/owl-bot-staging/v2alpha/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2alpha from './v2alpha'; -const MigrationServiceClient = v2alpha.MigrationServiceClient; -type MigrationServiceClient = v2alpha.MigrationServiceClient; -export {v2alpha, MigrationServiceClient}; -export default {v2alpha, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json deleted file mode 100644 index adf8d06..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2alpha", - "libraryPackage": "@google-cloud/bigquery-migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - 
"listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts deleted file mode 100644 index d5105f9..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import type * as gax from 'google-gax'; -import type {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; -import {Transform} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2alpha/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2alpha - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you - * need to avoid loading the default gRPC version and want to use the fallback - * HTTP implementation. Load only fallback version and pass it to the constructor: - * ``` - * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new MigrationServiceClient({fallback: 'rest'}, gax); - * ``` - */ - constructor(opts?: ClientOptions, gaxInstance?: typeof gax | typeof gax.fallback) { - // Ensure that options include all the required fields. 
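
The constructor JSDoc above lists the accepted options. A minimal sketch of constructing the client, assuming Application Default Credentials are available; the project ID is a placeholder, not a value from this patch:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

// Default transport: gRPC, credentials resolved from the environment.
const client = new MigrationServiceClient();

// HTTP/1.1 REST fallback, e.g. for environments without gRPC support.
const restClient = new MigrationServiceClient({
  fallback: 'rest',
  projectId: 'my-project', // placeholder
});
```
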
- const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Load google-gax module synchronously if needed - if (!gaxInstance) { - gaxInstance = require('google-gax') as typeof gax; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. 
- this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = this._gaxModule.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2alpha.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. 
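
initialize() runs lazily on the first RPC, but it can be awaited explicitly when authentication and proto loading should happen up front. A short sketch, assuming a client constructed as above (the wrapper function name is only for the sketch):

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function warmUp(client: MigrationServiceClient) {
  // initialize() is idempotent: later calls resolve the cached service stub.
  await client.initialize();
}
```
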
- */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
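
The static getters and getProjectId() above are handy for diagnostics; a sketch using the promise form, assuming default credentials (the commented values are the generated defaults):

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function describeClient() {
  console.log(MigrationServiceClient.servicePath); // bigquerymigration.googleapis.com
  console.log(MigrationServiceClient.port);        // 443
  console.log(MigrationServiceClient.scopes);      // ['https://www.googleapis.com/auth/cloud-platform']

  const client = new MigrationServiceClient();
  console.log(await client.getProjectId());        // resolved from the credentials/environment
}
```
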
- * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
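
A minimal sketch of calling createMigrationWorkflow as documented above; the parent and workflow payload are placeholders, not values taken from this patch:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function createWorkflow() {
  const client = new MigrationServiceClient();
  const [workflow] = await client.createMigrationWorkflow({
    parent: 'projects/my-project/locations/us', // placeholder
    migrationWorkflow: {
      displayName: 'example-workflow',          // placeholder
      tasks: {},                                // tasks keyed by task ID
    },
  });
  console.log(workflow.name);
}
```
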
- * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
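
A sketch of getMigrationWorkflow with a FieldMask; the resource name and field paths are illustrative:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function getWorkflow() {
  const client = new MigrationServiceClient();
  const [workflow] = await client.getMigrationWorkflow({
    name: 'projects/my-project/locations/us/workflows/1234', // placeholder
    readMask: {paths: ['name', 'display_name', 'state']},    // illustrative field paths
  });
  console.log(workflow.state);
}
```
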
- * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
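
deleteMigrationWorkflow resolves to google.protobuf.Empty; a sketch with a placeholder workflow name:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function deleteWorkflow() {
  const client = new MigrationServiceClient();
  await client.deleteMigrationWorkflow({
    name: 'projects/my-project/locations/us/workflows/1234', // placeholder
  });
}
```
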
- * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
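
A sketch of startMigrationWorkflow, which moves a DRAFT workflow to RUNNING (a no-op if it is already RUNNING); the name is a placeholder:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function startWorkflow() {
  const client = new MigrationServiceClient();
  // The call fails if the workflow is in any state other than DRAFT or RUNNING.
  await client.startMigrationWorkflow({
    name: 'projects/my-project/locations/us/workflows/1234', // placeholder
  });
}
```
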
- * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
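
A sketch of getMigrationSubtask; the subtask name is a placeholder and the readMask is optional:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function getSubtask() {
  const client = new MigrationServiceClient();
  const [subtask] = await client.getMigrationSubtask({
    name: 'projects/my-project/locations/us/workflows/1234/subtasks/543', // placeholder
    readMask: {paths: ['name', 'state']},                                 // optional FieldMask
  });
  console.log(subtask.state);
}
```
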
- * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to 
`method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
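
A sketch of the stream variant above, which emits one MigrationWorkflow per 'data' event; the parent is a placeholder:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

function streamWorkflows() {
  const client = new MigrationServiceClient();
  client.listMigrationWorkflowsStream({parent: 'projects/my-project/locations/us'}) // placeholder
    .on('data', workflow => console.log(workflow.name))
    .on('error', console.error)
    .on('end', () => console.log('all pages consumed'));
}
```
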
- * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. 
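
The generator exposes three surfaces for this list call; a sketch of the auto-paginated array form and the async-iterable form documented above, with a placeholder parent:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listWorkflows() {
  const client = new MigrationServiceClient();

  // Auto-pagination: the library merges all pages into one array.
  const [workflows] = await client.listMigrationWorkflows({
    parent: 'projects/my-project/locations/us', // placeholder
    pageSize: 50,
  });
  console.log(workflows.length);

  // Async iteration: pages are fetched lazily; stop whenever you have enough.
  for await (const workflow of client.listMigrationWorkflowsAsync({
    parent: 'projects/my-project/locations/us', // placeholder
  })) {
    console.log(workflow.name);
  }
}
```
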
- * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. 
- * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
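
A sketch of listing subtasks restricted to a single task, using the filter syntax described above; the parent and task ID are placeholders:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listSubtasksForTask() {
  const client = new MigrationServiceClient();
  const [subtasks] = await client.listMigrationSubtasks({
    parent: 'projects/my-project/locations/us/workflows/1234', // placeholder
    filter: 'migration_task = "ab012"',                        // placeholder task ID
    pageSize: 100,
  });
  for (const subtask of subtasks) {
    console.log(subtask.name, subtask.state);
  }
}
```
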
- * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. - */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
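
A sketch of the path-template helpers defined above, plus close(); the project, location, and workflow values are placeholders:

```
import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function resourceNames() {
  const client = new MigrationServiceClient();

  // Render a fully-qualified resource name...
  const name = client.migrationWorkflowPath('my-project', 'us', '1234'); // placeholders
  // ...and parse individual components back out of it.
  console.log(client.matchLocationFromMigrationWorkflowName(name)); // 'us'
  console.log(client.matchWorkflowFromMigrationWorkflowName(name)); // '1234'

  // Release the underlying channel once the client is no longer needed.
  await client.close();
}
```
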
- */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json deleted file mode 100644 index 2184b83..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2alpha.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationSubtasks": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json deleted file mode 100644 index 8e91e42..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" -] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js deleted file mode 100644 index ecc7e4b..0000000 --- 
a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/bigquery-migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 80fbe2d..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts deleted file mode 100644 index 557a575..0000000 --- a/owl-bot-staging/v2alpha/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {packNTest} from 'pack-n-play'; -import {readFileSync} from 'fs'; -import {describe, it} from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts deleted file mode 100644 index 99aac57..0000000 --- a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2alpha.MigrationServiceClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - 
const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - 
}; - const expectedError = new Error('expected'); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 
'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - 
.getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: 
Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, 
- }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes 
listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] 
= []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], 
request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes 
listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - 
client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, 
"locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2alpha/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2alpha/webpack.config.js b/owl-bot-staging/v2alpha/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2alpha/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/src/v2/migration_service_client.ts b/src/v2/migration_service_client.ts index add606f..c43d8fe 100644 --- a/src/v2/migration_service_client.ts +++ b/src/v2/migration_service_client.ts @@ -17,8 +17,8 @@ // ** All changes to this file may be overwritten. ** /* global window */ -import * as gax from 'google-gax'; -import { +import type * as gax from 'google-gax'; +import type { Callback, CallOptions, Descriptors, @@ -26,7 +26,6 @@ import { PaginationCallback, GaxCall, } from 'google-gax'; - import {Transform} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); @@ -36,7 +35,6 @@ import jsonProtos = require('../../protos/protos.json'); * This file defines retry strategy and timeouts for all API methods in this library. */ import * as gapicConfig from './migration_service_client_config.json'; - const version = require('../../../package.json').version; /** @@ -96,8 +94,18 @@ export class MigrationServiceClient { * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. * For more information, please check the * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new MigrationServiceClient({fallback: 'rest'}, gax); + * ``` */ - constructor(opts?: ClientOptions) { + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback + ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof MigrationServiceClient; const servicePath = @@ -117,8 +125,13 @@ export class MigrationServiceClient { opts['scopes'] = staticMembers.scopes; } + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + // Choose either gRPC or proto-over-HTTP implementation of google-gax. 
- this._gaxModule = opts.fallback ? gax.fallback : gax; + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. this._gaxGrpc = new this._gaxModule.GrpcClient(opts); @@ -203,7 +216,7 @@ export class MigrationServiceClient { this.innerApiCalls = {}; // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; + this.warn = this._gaxModule.warn; } /** @@ -424,7 +437,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); @@ -528,7 +541,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -626,7 +639,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -731,7 +744,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -835,7 +848,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -942,7 +955,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); @@ -992,7 +1005,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listMigrationWorkflows']; @@ -1047,7 +1060,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listMigrationWorkflows']; @@ -1163,7 +1176,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; 
options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); @@ -1213,7 +1226,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listMigrationSubtasks']; @@ -1272,7 +1285,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listMigrationSubtasks']; diff --git a/src/v2alpha/migration_service_client.ts b/src/v2alpha/migration_service_client.ts index c6b8fca..86e252d 100644 --- a/src/v2alpha/migration_service_client.ts +++ b/src/v2alpha/migration_service_client.ts @@ -17,8 +17,8 @@ // ** All changes to this file may be overwritten. ** /* global window */ -import * as gax from 'google-gax'; -import { +import type * as gax from 'google-gax'; +import type { Callback, CallOptions, Descriptors, @@ -26,7 +26,6 @@ import { PaginationCallback, GaxCall, } from 'google-gax'; - import {Transform} from 'stream'; import * as protos from '../../protos/protos'; import jsonProtos = require('../../protos/protos.json'); @@ -36,7 +35,6 @@ import jsonProtos = require('../../protos/protos.json'); * This file defines retry strategy and timeouts for all API methods in this library. */ import * as gapicConfig from './migration_service_client_config.json'; - const version = require('../../../package.json').version; /** @@ -96,8 +94,18 @@ export class MigrationServiceClient { * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. * For more information, please check the * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new MigrationServiceClient({fallback: 'rest'}, gax); + * ``` */ - constructor(opts?: ClientOptions) { + constructor( + opts?: ClientOptions, + gaxInstance?: typeof gax | typeof gax.fallback + ) { // Ensure that options include all the required fields. const staticMembers = this.constructor as typeof MigrationServiceClient; const servicePath = @@ -117,8 +125,13 @@ export class MigrationServiceClient { opts['scopes'] = staticMembers.scopes; } + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. 
this._gaxGrpc = new this._gaxModule.GrpcClient(opts); @@ -203,7 +216,7 @@ export class MigrationServiceClient { this.innerApiCalls = {}; // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; + this.warn = this._gaxModule.warn; } /** @@ -424,7 +437,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); @@ -528,7 +541,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -626,7 +639,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -731,7 +744,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -835,7 +848,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ name: request.name || '', }); this.initialize(); @@ -942,7 +955,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); @@ -992,7 +1005,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listMigrationWorkflows']; @@ -1047,7 +1060,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listMigrationWorkflows']; @@ -1163,7 +1176,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); this.initialize(); @@ -1213,7 +1226,7 @@ export class 
MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listMigrationSubtasks']; @@ -1272,7 +1285,7 @@ export class MigrationServiceClient { options.otherArgs = options.otherArgs || {}; options.otherArgs.headers = options.otherArgs.headers || {}; options.otherArgs.headers['x-goog-request-params'] = - gax.routingHeader.fromParams({ + this._gaxModule.routingHeader.fromParams({ parent: request.parent || '', }); const defaultCallSettings = this._defaults['listMigrationSubtasks']; From 885eb068bb72d4080db806604e1664ccf9eb0bd8 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 30 Aug 2022 10:26:14 +0000 Subject: [PATCH 12/13] fix: use _gaxModule when accessing gax for bundling PiperOrigin-RevId: 470911839 Source-Link: https://github.com/googleapis/googleapis/commit/352756699ebc5b2144c252867c265ea44448712e Source-Link: https://github.com/googleapis/googleapis-gen/commit/f16a1d224f00a630ea43d6a9a1a31f566f45cdea Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjE2YTFkMjI0ZjAwYTYzMGVhNDNkNmE5YTFhMzFmNTY2ZjQ1Y2RlYSJ9 --- owl-bot-staging/v2/.eslintignore | 7 + owl-bot-staging/v2/.eslintrc.json | 3 + owl-bot-staging/v2/.gitignore | 14 + owl-bot-staging/v2/.jsdoc.js | 55 + owl-bot-staging/v2/.mocharc.js | 33 + owl-bot-staging/v2/.prettierrc.js | 22 + owl-bot-staging/v2/README.md | 1 + owl-bot-staging/v2/linkinator.config.json | 16 + owl-bot-staging/v2/package.json | 64 + .../migration/v2/migration_entities.proto | 233 +++ .../v2/migration_error_details.proto | 62 + .../migration/v2/migration_metrics.proto | 111 ++ .../migration/v2/migration_service.proto | 245 ++++ .../migration/v2/translation_config.proto | 257 ++++ ...ation_service.create_migration_workflow.js | 67 + ...ation_service.delete_migration_workflow.js | 62 + ...migration_service.get_migration_subtask.js | 66 + ...igration_service.get_migration_workflow.js | 66 + ...gration_service.list_migration_subtasks.js | 86 ++ ...ration_service.list_migration_workflows.js | 80 ++ ...ration_service.start_migration_workflow.js | 62 + ...ta.google.cloud.bigquery.migration.v2.json | 335 +++++ owl-bot-staging/v2/src/index.ts | 25 + owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 ++ owl-bot-staging/v2/src/v2/index.ts | 19 + .../v2/src/v2/migration_service_client.ts | 1256 +++++++++++++++++ .../v2/migration_service_client_config.json | 71 + .../src/v2/migration_service_proto_list.json | 7 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + owl-bot-staging/v2/system-test/install.ts | 49 + .../v2/test/gapic_migration_service_v2.ts | 1256 +++++++++++++++++ owl-bot-staging/v2/tsconfig.json | 19 + owl-bot-staging/v2/webpack.config.js | 64 + owl-bot-staging/v2alpha/.eslintignore | 7 + owl-bot-staging/v2alpha/.eslintrc.json | 3 + owl-bot-staging/v2alpha/.gitignore | 14 + owl-bot-staging/v2alpha/.jsdoc.js | 55 + owl-bot-staging/v2alpha/.mocharc.js | 33 + owl-bot-staging/v2alpha/.prettierrc.js | 22 + owl-bot-staging/v2alpha/README.md | 1 + .../v2alpha/linkinator.config.json | 16 + owl-bot-staging/v2alpha/package.json | 64 + .../migration/v2alpha/assessment_task.proto | 49 + .../v2alpha/migration_entities.proto | 244 ++++ .../v2alpha/migration_error_details.proto | 62 + .../migration/v2alpha/migration_metrics.proto | 111 ++ 
.../migration/v2alpha/migration_service.proto | 247 ++++ .../migration/v2alpha/translation_task.proto | 207 +++ ...ation_service.create_migration_workflow.js | 67 + ...ation_service.delete_migration_workflow.js | 62 + ...migration_service.get_migration_subtask.js | 66 + ...igration_service.get_migration_workflow.js | 66 + ...gration_service.list_migration_subtasks.js | 86 ++ ...ration_service.list_migration_workflows.js | 80 ++ ...ration_service.start_migration_workflow.js | 62 + ...ogle.cloud.bigquery.migration.v2alpha.json | 335 +++++ owl-bot-staging/v2alpha/src/index.ts | 25 + .../v2alpha/src/v2alpha/gapic_metadata.json | 101 ++ owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 + .../src/v2alpha/migration_service_client.ts | 1256 +++++++++++++++++ .../migration_service_client_config.json | 73 + .../v2alpha/migration_service_proto_list.json | 8 + .../system-test/fixtures/sample/src/index.js | 27 + .../system-test/fixtures/sample/src/index.ts | 32 + .../v2alpha/system-test/install.ts | 49 + .../test/gapic_migration_service_v2alpha.ts | 1256 +++++++++++++++++ owl-bot-staging/v2alpha/tsconfig.json | 19 + owl-bot-staging/v2alpha/webpack.config.js | 64 + 69 files changed, 9761 insertions(+) create mode 100644 owl-bot-staging/v2/.eslintignore create mode 100644 owl-bot-staging/v2/.eslintrc.json create mode 100644 owl-bot-staging/v2/.gitignore create mode 100644 owl-bot-staging/v2/.jsdoc.js create mode 100644 owl-bot-staging/v2/.mocharc.js create mode 100644 owl-bot-staging/v2/.prettierrc.js create mode 100644 owl-bot-staging/v2/README.md create mode 100644 owl-bot-staging/v2/linkinator.config.json create mode 100644 owl-bot-staging/v2/package.json create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto create mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json create mode 100644 owl-bot-staging/v2/src/index.ts create mode 100644 owl-bot-staging/v2/src/v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/src/v2/index.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts create mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json create mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js create mode 100644 
owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2/system-test/install.ts create mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts create mode 100644 owl-bot-staging/v2/tsconfig.json create mode 100644 owl-bot-staging/v2/webpack.config.js create mode 100644 owl-bot-staging/v2alpha/.eslintignore create mode 100644 owl-bot-staging/v2alpha/.eslintrc.json create mode 100644 owl-bot-staging/v2alpha/.gitignore create mode 100644 owl-bot-staging/v2alpha/.jsdoc.js create mode 100644 owl-bot-staging/v2alpha/.mocharc.js create mode 100644 owl-bot-staging/v2alpha/.prettierrc.js create mode 100644 owl-bot-staging/v2alpha/README.md create mode 100644 owl-bot-staging/v2alpha/linkinator.config.json create mode 100644 owl-bot-staging/v2alpha/package.json create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto create mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js create mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json create mode 100644 owl-bot-staging/v2alpha/src/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json create mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v2alpha/system-test/install.ts create mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts create mode 100644 owl-bot-staging/v2alpha/tsconfig.json create mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage 
+build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js new file mode 100644 index 0000000..c3c1e3d --- /dev/null +++ b/owl-bot-staging/v2/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/bigquery-migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json new file mode 100644 index 0000000..24ced44 --- /dev/null +++ b/owl-bot-staging/v2/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/bigquery-migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . 
&& cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.3.1" + }, + "devDependencies": { + "@types/mocha": "^9.1.1", + "@types/node": "^16.11.56", + "@types/sinon": "^10.0.13", + "c8": "^7.12.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.11", + "jsdoc-fresh": "^2.0.1", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.2", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^8.4.0", + "typescript": "^4.8.2", + "webpack": "^4.46.0", + "webpack-cli": "^4.10.0" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto new file mode 100644 index 0000000..7d77bae --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto @@ -0,0 +1,233 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2/translation_config.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. 
forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. + // The ID is server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map tasks = 2; + + // Output only. That status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // Tha task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. + FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Batch SQL Translation. + TranslationConfigDetails translation_config_details = 14; + } + + // Output only. Immutable. The unique identifier for the migration task. The + // ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be one of the supported task types: + // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, + // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, + // Translation_Snowflake2BQ, Translation_Netezza2BQ, + // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, + // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. + string type = 2; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. 
+ enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID + // is server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in + // FAILED state. + google.rpc.ErrorInfo processing_error = 6 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Provides details to errors and issues encountered while + // processing the subtask. Presence of error details does not mean that the + // subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number or resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto new file mode 100644 index 0000000..199e2db --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that where encountered when +// processing a subtask. +message ErrorDetail { + // Optional. The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto new file mode 100644 index 0000000..e52fead --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. 
+ oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately `+/-10^(+/-300)` and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto new file mode 100644 index 0000000..3c1a89e --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto @@ -0,0 +1,245 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2/migration_entities.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. 
+ rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. + rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. 
+ // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. + // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. 
+ string next_page_token = 2; +} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto new file mode 100644 index 0000000..994140d --- /dev/null +++ b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto @@ -0,0 +1,257 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationConfigProto"; +option java_package = "com.google.cloud.bigquery.migration.v2"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; + +// The translation config to capture necessary settings for a translation task +// and subtask. +message TranslationConfigDetails { + // The chosen path where the source for input files will be found. + oneof source_location { + // The Cloud Storage path for a directory of files to translate in a task. + string gcs_source_path = 1; + } + + // The chosen path where the destination for output files will be found. + oneof target_location { + // The Cloud Storage path to write back the corresponding input files to. + string gcs_target_path = 2; + } + + // The dialect of the input files. + Dialect source_dialect = 3; + + // The target dialect for the engine to translate the input to. + Dialect target_dialect = 4; + + // The mapping of full SQL object names from their current state to the + // desired output. + oneof output_name_mapping { + // The mapping of objects to their desired output names in list form. + ObjectNameMappingList name_mapping_list = 5; + } + + // The default source environment values for the translation. + SourceEnv source_env = 6; +} + +// The possible dialect options for translation. +message Dialect { + // The possible dialect options that this message represents. 
+ oneof dialect_value { + // The BigQuery dialect + BigQueryDialect bigquery_dialect = 1; + + // The HiveQL dialect + HiveQLDialect hiveql_dialect = 2; + + // The Redshift dialect + RedshiftDialect redshift_dialect = 3; + + // The Teradata dialect + TeradataDialect teradata_dialect = 4; + + // The Oracle dialect + OracleDialect oracle_dialect = 5; + + // The SparkSQL dialect + SparkSQLDialect sparksql_dialect = 6; + + // The Snowflake dialect + SnowflakeDialect snowflake_dialect = 7; + + // The Netezza dialect + NetezzaDialect netezza_dialect = 8; + + // The Azure Synapse dialect + AzureSynapseDialect azure_synapse_dialect = 9; + + // The Vertica dialect + VerticaDialect vertica_dialect = 10; + + // The SQL Server dialect + SQLServerDialect sql_server_dialect = 11; + + // The Postgresql dialect + PostgresqlDialect postgresql_dialect = 12; + + // The Presto dialect + PrestoDialect presto_dialect = 13; + + // The MySQL dialect + MySQLDialect mysql_dialect = 14; + } +} + +// The dialect definition for BigQuery. +message BigQueryDialect {} + +// The dialect definition for HiveQL. +message HiveQLDialect {} + +// The dialect definition for Redshift. +message RedshiftDialect {} + +// The dialect definition for Teradata. +message TeradataDialect { + // The sub-dialect options for Teradata. + enum Mode { + // Unspecified mode. + MODE_UNSPECIFIED = 0; + + // Teradata SQL mode. + SQL = 1; + + // BTEQ mode (which includes SQL). + BTEQ = 2; + } + + // Which Teradata sub-dialect mode the user specifies. + Mode mode = 1; +} + +// The dialect definition for Oracle. +message OracleDialect {} + +// The dialect definition for SparkSQL. +message SparkSQLDialect {} + +// The dialect definition for Snowflake. +message SnowflakeDialect {} + +// The dialect definition for Netezza. +message NetezzaDialect {} + +// The dialect definition for Azure Synapse. +message AzureSynapseDialect {} + +// The dialect definition for Vertica. +message VerticaDialect {} + +// The dialect definition for SQL Server. +message SQLServerDialect {} + +// The dialect definition for Postgresql. +message PostgresqlDialect {} + +// The dialect definition for Presto. +message PrestoDialect {} + +// The dialect definition for MySQL. +message MySQLDialect {} + +// Represents a map of name mappings using a list of key:value proto messages of +// existing name to desired output name. +message ObjectNameMappingList { + // The elements of the object name map. + repeated ObjectNameMapping name_map = 1; +} + +// Represents a key-value pair of NameMappingKey to NameMappingValue to +// represent the mapping of SQL names from the input value to desired output. +message ObjectNameMapping { + // The name of the object in source that is being mapped. + NameMappingKey source = 1; + + // The desired target name of the object that is being mapped. + NameMappingValue target = 2; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the source data warehouse. +message NameMappingKey { + // The type of the object that is being mapped. + enum Type { + // Unspecified name mapping type. + TYPE_UNSPECIFIED = 0; + + // The object being mapped is a database. + DATABASE = 1; + + // The object being mapped is a schema. + SCHEMA = 2; + + // The object being mapped is a relation. + RELATION = 3; + + // The object being mapped is an attribute. + ATTRIBUTE = 4; + + // The object being mapped is a relation alias. + RELATION_ALIAS = 5; + + // The object being mapped is a an attribute alias. 
+ ATTRIBUTE_ALIAS = 6; + + // The object being mapped is a function. + FUNCTION = 7; + } + + // The type of object that is being mapped. + Type type = 1; + + // The database name (BigQuery project ID equivalent in the source data + // warehouse). + string database = 2; + + // The schema name (BigQuery dataset equivalent in the source data warehouse). + string schema = 3; + + // The relation name (BigQuery table or view equivalent in the source data + // warehouse). + string relation = 4; + + // The attribute name (BigQuery column equivalent in the source data + // warehouse). + string attribute = 5; +} + +// The potential components of a full name mapping that will be mapped +// during translation in the target data warehouse. +message NameMappingValue { + // The database name (BigQuery project ID equivalent in the target data + // warehouse). + string database = 1; + + // The schema name (BigQuery dataset equivalent in the target data warehouse). + string schema = 2; + + // The relation name (BigQuery table or view equivalent in the target data + // warehouse). + string relation = 3; + + // The attribute name (BigQuery column equivalent in the target data + // warehouse). + string attribute = 4; +} + +// Represents the default source environment values for the translation. +message SourceEnv { + // The default database name to fully qualify SQL objects when their database + // name is missing. + string default_database = 1; + + // The schema search path. When SQL objects are missing schema name, + // translation engine will search through this list to find the value. + repeated string schema_search_path = 2; +} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..8301c3a --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js @@ -0,0 +1,67 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. 
+ */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..9f0651e --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..25de9e0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..52ab5cd --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..c5c7ed0 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js @@ -0,0 +1,86 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..ebd2127 --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js @@ -0,0 +1,80 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..7f8257d --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json new file mode 100644 index 0000000..81ec8bb --- /dev/null +++ b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2", + "version": "v2" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 72, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 78, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", + "service": { + "shortName": "MigrationService", + "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts new file mode 100644 index 0000000..35a8fd9 --- /dev/null +++ b/owl-bot-staging/v2/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2 from './v2'; +const MigrationServiceClient = v2.MigrationServiceClient; +type MigrationServiceClient = v2.MigrationServiceClient; +export {v2, MigrationServiceClient}; +export default {v2, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json new file mode 100644 index 0000000..1b6a33c --- /dev/null +++ b/owl-bot-staging/v2/src/v2/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2", + "libraryPackage": "@google-cloud/bigquery-migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/index.ts 
b/owl-bot-staging/v2/src/v2/index.ts new file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts new file mode 100644 index 0000000..cfb3f86 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import type * as gax from 'google-gax'; +import type {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; +import {Transform} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
+ * @class + * @memberof v2 + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new MigrationServiceClient({fallback: 'rest'}, gax); + * ``` + */ + constructor(opts?: ClientOptions, gaxInstance?: typeof gax | typeof gax.fallback) { + // Ensure that options include all the required fields. 
+ const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. 
+ this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. 
+ */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise<string>; + getProjectId(callback: Callback<string, undefined, undefined>): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback<string, undefined, undefined>): + Promise<string>|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples.
+ * @example include:samples/generated/v2/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
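Editorial aside: a minimal sketch of starting a workflow by name, assuming the DRAFT-to-RUNNING semantics described above (the workflow name is a placeholder):

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function startWorkflow(): Promise<void> {
  const client = new MigrationServiceClient();
  // Per the documentation above, this is a no-op if the workflow is already
  // RUNNING and fails for states other than DRAFT or RUNNING.
  await client.startMigrationWorkflow({
    name: 'projects/my-project/locations/us/workflows/1234',
  });
}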
+ * @example include:samples/generated/v2/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
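Editorial aside: a brief sketch of fetching a single subtask with the method implemented above (the subtask name is a placeholder):

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function getSubtask(): Promise<void> {
  const client = new MigrationServiceClient();
  const [subtask] = await client.getMigrationSubtask({
    name: 'projects/my-project/locations/us/workflows/1234/subtasks/543',
  });
  console.log(subtask.state);
}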
+ * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. 
+ * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
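Editorial aside: a hedged sketch of consuming the object stream returned by `listMigrationWorkflowsStream` above (placeholder parent; pagination happens behind the 'data' events):

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

const client = new MigrationServiceClient();
client
  .listMigrationWorkflowsStream({parent: 'projects/my-project/locations/us'})
  .on('data', workflow => console.log(workflow.name))
  .on('error', console.error)
  .on('end', () => console.log('No more workflows.'));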
+ * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples.
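Editorial aside: a minimal sketch of the async-iteration form recommended above (placeholder parent); breaking out of the loop stops further API calls:

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listWorkflows(): Promise<void> {
  const client = new MigrationServiceClient();
  for await (const workflow of client.listMigrationWorkflowsAsync({
    parent: 'projects/my-project/locations/us',
  })) {
    console.log(workflow.name);
  }
}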
+ */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. 
+ * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
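Editorial aside: a sketch of async iteration over subtasks using the `filter` syntax quoted in the documentation above (parent and task ID are placeholders):

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listSubtasksForTask(): Promise<void> {
  const client = new MigrationServiceClient();
  for await (const subtask of client.listMigrationSubtasksAsync({
    parent: 'projects/my-project/locations/us/workflows/1234',
    // Filter form taken from the doc comment above; "ab012" is a task ID.
    filter: 'migration_task = "ab012"',
  })) {
    console.log(subtask.name);
  }
}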
+ * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. + */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource.
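Editorial aside: the path-template helpers above compose and parse resource names without touching the API. A small sketch with placeholder components:

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

const client = new MigrationServiceClient();
// Build a fully-qualified subtask resource name from its components...
const subtaskName = client.migrationSubtaskPath('my-project', 'us', '1234', '543');
// ...and recover individual components from a resource name.
const project = client.matchProjectFromMigrationSubtaskName(subtaskName);
console.log(subtaskName, project);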
+ * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
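Editorial aside: putting the workflow path helper and `close()` together, a hedged end-to-end sketch (placeholder identifiers):

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function inspectWorkflow(): Promise<void> {
  const client = new MigrationServiceClient();
  const workflowName = client.migrationWorkflowPath('my-project', 'us', '1234');
  const [workflow] = await client.getMigrationWorkflow({name: workflowName});
  console.log(workflow.displayName, workflow.state);
  // Release the underlying gRPC channel once the client is no longer needed.
  await client.close();
}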
+ */ + close(): Promise<void> { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json new file mode 100644 index 0000000..5832815 --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_client_config.json @@ -0,0 +1,71 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListMigrationSubtasks": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json new file mode 100644 index 0000000..57df7ab --- /dev/null +++ b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json @@ -0,0 +1,7 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", + "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" +] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..ecc7e4b --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
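Editorial aside: the client config above supplies per-method defaults (60s timeouts, retry on UNAVAILABLE for several methods); individual calls can still override them through the gax `CallOptions` argument. A hedged sketch, with an arbitrary timeout value and a placeholder workflow name:

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function getWithLongerTimeout(): Promise<void> {
  const client = new MigrationServiceClient();
  // Override the configured 60s timeout for this single call.
  const [workflow] = await client.getMigrationWorkflow(
    {name: 'projects/my-project/locations/us/workflows/1234'},
    {timeout: 120000}
  );
  console.log(workflow.state);
}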
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/bigquery-migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..80fbe2d --- /dev/null +++ b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts new file mode 100644 index 0000000..557a575 --- /dev/null +++ b/owl-bot-staging/v2/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import {packNTest} from 'pack-n-play'; +import {readFileSync} from 'fs'; +import {describe, it} from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts new file mode 100644 index 0000000..061c58c --- /dev/null +++ b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage<T extends object>(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall<ResponseType>(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback<ResponseType>(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall<ResponseType>(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ?
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall<ResponseType>(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2.MigrationServiceClient', () => { + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new
migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + 
client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', 
async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + 
assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + 
const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = 
generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, 
expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + 
resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = 
stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + 
.getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + 
assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore new file mode 100644 index 0000000..cfc348e --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json new file mode 100644 index 0000000..7821534 --- /dev/null +++ b/owl-bot-staging/v2alpha/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore new file mode 100644 index 0000000..5d32b23 --- /dev/null +++ b/owl-bot-staging/v2alpha/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js new file mode 100644 index 0000000..c3c1e3d --- /dev/null +++ b/owl-bot-staging/v2alpha/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/bigquery-migration', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js new file mode 100644 index 0000000..481c522 --- /dev/null +++ b/owl-bot-staging/v2alpha/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js new file mode 100644 index 0000000..494e147 --- /dev/null +++ b/owl-bot-staging/v2alpha/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md new file mode 100644 index 0000000..4e0341c --- /dev/null +++ b/owl-bot-staging/v2alpha/README.md @@ -0,0 +1 @@ +Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json new file mode 100644 index 0000000..befd23c --- /dev/null +++ b/owl-bot-staging/v2alpha/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json new file mode 100644 index 0000000..24ced44 --- /dev/null +++ b/owl-bot-staging/v2alpha/package.json @@ -0,0 +1,64 @@ +{ + "name": "@google-cloud/bigquery-migration", + "version": "0.1.0", + "description": "Migration client for Node.js", + "repository": "googleapis/nodejs-migration", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google migration", + "migration", + "migration service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . && cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^3.3.1" + }, + "devDependencies": { + "@types/mocha": "^9.1.1", + "@types/node": "^16.11.56", + "@types/sinon": "^10.0.13", + "c8": "^7.12.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.11", + "jsdoc-fresh": "^2.0.1", + "jsdoc-region-tag": "^2.0.0", + "linkinator": "^4.0.2", + "mocha": "^10.0.0", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^14.0.0", + "ts-loader": "^8.4.0", + "typescript": "^4.8.2", + "webpack": "^4.46.0", + "webpack-cli": "^4.10.0" + }, + "engines": { + "node": ">=v12" + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto new file mode 100644 index 0000000..0c6ea13 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto @@ -0,0 +1,49 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "AssessmentTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Assessment task config. +message AssessmentTaskDetails { + // Required. The Cloud Storage path for assessment input files. + string input_path = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The BigQuery dataset for output. + string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. An optional Cloud Storage path to write the query logs (which is + // then used as an input path on the translation task) + string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) + // from which the input data is extracted. + string data_source = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// Details for an assessment task orchestration result. +message AssessmentOrchestrationResultDetails { + // Optional. The version used for the output table schemas. + string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto new file mode 100644 index 0000000..50d4c75 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto @@ -0,0 +1,244 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationEntitiesProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// A migration workflow which specifies what needs to be done for an EDW +// migration. +message MigrationWorkflow { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}" + }; + + // Possible migration workflow states. + enum State { + // Workflow state is unspecified. + STATE_UNSPECIFIED = 0; + + // Workflow is in draft status, i.e. tasks are not yet eligible for + // execution. + DRAFT = 1; + + // Workflow is running (i.e. tasks are eligible for execution). + RUNNING = 2; + + // Workflow is paused. Tasks currently in progress may continue, but no + // further tasks will be scheduled. + PAUSED = 3; + + // Workflow is complete. There should not be any task in a non-terminal + // state, but if they are (e.g. forced termination), they will not be + // scheduled. + COMPLETED = 4; + } + + // Output only. Immutable. The unique identifier for the migration workflow. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The display name of the workflow. This can be set to give a workflow + // a descriptive name. There is no guarantee or enforcement of uniqueness. + string display_name = 6; + + // The tasks in a workflow in a named map. The name (i.e. key) has no + // meaning and is merely a convenient way to address a specific task + // in a workflow. + map<string, MigrationTask> tasks = 2; + + // Output only. The status of the workflow. + State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the workflow was created. + google.protobuf.Timestamp create_time = 4; + + // Time when the workflow was last updated. + google.protobuf.Timestamp last_update_time = 5; +} + +// A single task for a migration which has details about the configuration of +// the task. +message MigrationTask { + // Possible states of a migration task. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The task is waiting for orchestration. + PENDING = 1; + + // The task is assigned to an orchestrator. + ORCHESTRATING = 2; + + // The task is running, i.e. its subtasks are ready for execution. + RUNNING = 3; + + // The task is paused. Assigned subtasks can continue, but no new subtasks + // will be scheduled. + PAUSED = 4; + + // The task finished successfully. + SUCCEEDED = 5; + + // The task finished unsuccessfully. 
+ FAILED = 6; + } + + // The details of the task. + oneof task_details { + // Task configuration for Assessment. + AssessmentTaskDetails assessment_task_details = 12; + + // Task configuration for Batch/Offline SQL Translation. + TranslationTaskDetails translation_task_details = 13; + } + + // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. + string id = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The type of the task. This must be a supported task type. + string type = 2; + + // DEPRECATED! Use one of the task_details below. + // The details of the task. The type URL must be one of the supported task + // details messages and correspond to the Task's type. + google.protobuf.Any details = 3; + + // Output only. The current state of the task. + State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Time when the task was created. + google.protobuf.Timestamp create_time = 6; + + // Time when the task was last updated. + google.protobuf.Timestamp last_update_time = 7; + + // Output only. Additional information about the orchestration. + MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A subtask for a migration which carries details about the configuration of +// the subtask. The content of the details should not matter to the end user, +// but is a contract between the subtask creator and subtask worker. +message MigrationSubtask { + option (google.api.resource) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" + }; + + // Possible states of a migration subtask. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The subtask is ready, i.e. it is ready for execution. + ACTIVE = 1; + + // The subtask is running, i.e. it is assigned to a worker for execution. + RUNNING = 2; + + // The subtask finished successfully. + SUCCEEDED = 3; + + // The subtask finished unsuccessfully. + FAILED = 4; + + // The subtask is paused, i.e., it will not be scheduled. If it was already + // assigned,it might still finish but no new lease renewals will be granted. + PAUSED = 5; + } + + // Output only. Immutable. The resource name for the migration subtask. The ID is + // server-generated. + // + // Example: `projects/123/locations/us/workflows/345/subtasks/678` + string name = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = IMMUTABLE + ]; + + // The unique ID of the task to which this subtask belongs. + string task_id = 2; + + // The type of the Subtask. The migration service does not check whether this + // is a known type. It is up to the task creator (i.e. orchestrator or worker) + // to ensure it only creates subtasks for which there are compatible workers + // polling for Subtasks. + string type = 3; + + // Output only. The current state of the subtask. + State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. An explanation that may be populated when the task is in FAILED state. + google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. 
Provides details about errors and issues encountered while processing the + // subtask. Presence of error details does not mean that the subtask failed. + repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The number of resources with errors. Note: This is not the total + // number of errors as each resource can have more than one error. + // This is used to indicate truncation by having a `resource_error_count` + // that is higher than the size of `resource_error_details`. + int32 resource_error_count = 13; + + // Time when the subtask was created. + google.protobuf.Timestamp create_time = 7; + + // Time when the subtask was last updated. + google.protobuf.Timestamp last_update_time = 8; + + // The metrics for the subtask. + repeated TimeSeries metrics = 11; +} + +// Additional information from the orchestrator when it is done with the +// task orchestration. +message MigrationTaskOrchestrationResult { + // Details specific to the task type. + oneof details { + // Details specific to assessment task types. + AssessmentOrchestrationResultDetails assessment_details = 1; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto new file mode 100644 index 0000000..89dac5e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto @@ -0,0 +1,62 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/field_behavior.proto"; +import "google/rpc/error_details.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationErrorDetailsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Provides details for errors and the corresponding resources. +message ResourceErrorDetail { + // Required. Information about the resource where the error is located. + google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The error details for the resource. + repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. How many errors there are in total for the resource. Truncation can be + // indicated by having an `error_count` that is higher than the size of + // `error_details`. + int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Provides details for errors, e.g. issues that were encountered when +// processing a subtask. +message ErrorDetail { + // Optional.
The exact location within the resource (if applicable). + ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Describes the cause of the error with structured detail. + google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Holds information about where the error is located. +message ErrorLocation { + // Optional. If applicable, denotes the line where the error occurred. A zero value + // means that there is no line information. + int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If applicable, denotes the column where the error occurred. A zero value + // means that there is no columns information. + int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto new file mode 100644 index 0000000..ce60dd2 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto @@ -0,0 +1,111 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/distribution.proto"; +import "google/api/field_behavior.proto"; +import "google/api/metric.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationMetricsProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// The metrics object for a SubTask. +message TimeSeries { + // Required. The name of the metric. + // + // If the metric is not known by the service yet, it will be auto-created. + string metric = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The value type of the time series. + google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The metric kind of the time series. + // + // If present, it must be the same as the metric kind of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // this field specifies the metric kind of the new descriptor and must be + // either `GAUGE` (the default) or `CUMULATIVE`. + google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. 
If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. + repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; +} + +// A single data point in a time series. +message Point { + // The time interval to which the data point applies. For `GAUGE` metrics, + // the start time does not need to be supplied, but if it is supplied, it must + // equal the end time. For `DELTA` metrics, the start and end time should + // specify a non-zero interval, with subsequent points specifying contiguous + // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end + // time should specify a non-zero interval, with subsequent points specifying + // the same start time and increasing end times, until an event resets the + // cumulative value to zero and sets a new start time for the following + // points. + TimeInterval interval = 1; + + // The value of the data point. + TypedValue value = 2; +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +message TimeInterval { + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The end of the time interval. + google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A single strongly-typed value. +message TypedValue { + // The typed value field. + oneof value { + // A Boolean value: `true` or `false`. + bool bool_value = 1; + + // A 64-bit integer. Its range is approximately +/-9.2x10^18. + int64 int64_value = 2; + + // A 64-bit double-precision floating-point number. Its magnitude + // is approximately +/-10^(+/-300) and it has 16 significant digits of + // precision. + double double_value = 3; + + // A variable-length string value. + string string_value = 4; + + // A distribution value. + google.api.Distribution distribution_value = 5; + } +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto new file mode 100644 index 0000000..9a184a1 --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto @@ -0,0 +1,247 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; +import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "MigrationServiceProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Service to handle EDW migrations. +service MigrationService { + option (google.api.default_host) = "bigquerymigration.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a migration workflow. + rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + post: "/v2alpha/{parent=projects/*/locations/*}/workflows" + body: "migration_workflow" + }; + option (google.api.method_signature) = "parent,migration_workflow"; + } + + // Gets a previously created migration workflow. + rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration workflow. + rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*}/workflows" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a migration workflow by name. + rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts a previously created migration workflow. I.e., the state transitions + // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + // An error will be signaled if the state is anything other than DRAFT or + // RUNNING. + rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Gets a previously created migration subtask. + rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { + option (google.api.http) = { + get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists previously created migration subtasks. 
+ rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { + option (google.api.http) = { + get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" + }; + option (google.api.method_signature) = "parent"; + } +} + +// Request to create a migration workflow resource. +message CreateMigrationWorkflowRequest { + // Required. The name of the project to which this migration workflow belongs. + // Example: `projects/foo/locations/bar` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The migration workflow to create. + MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to get a previously created migration workflow. +message GetMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; +} + +// A request to list previously created migration workflows. +message ListMigrationWorkflowsRequest { + // Required. The project and location of the migration workflows to list. + // Example: `projects/123/locations/us` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2; + + // The maximum number of migration workflows to return. The service may return + // fewer than this number. + int32 page_size = 3; + + // A page token, received from previous `ListMigrationWorkflows` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationWorkflows` + // must match the call that provided the page token. + string page_token = 4; +} + +// Response object for a `ListMigrationWorkflows` call. +message ListMigrationWorkflowsResponse { + // The migration workflows for the specified project / location. + repeated MigrationWorkflow migration_workflows = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a previously created migration workflow. +message DeleteMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to start a previously created migration workflow. +message StartMigrationWorkflowRequest { + // Required. The unique identifier for the migration workflow. + // Example: `projects/123/locations/us/workflows/1234` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; +} + +// A request to get a previously created migration subtasks. +message GetMigrationSubtaskRequest { + // Required. The unique identifier for the migration subtask. 
+ // Example: `projects/123/locations/us/workflows/1234/subtasks/543` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationSubtask" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to list previously created migration subtasks. +message ListMigrationSubtasksRequest { + // Required. The migration task of the subtasks to list. + // Example: `projects/123/locations/us/workflows/1234` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerymigration.googleapis.com/MigrationWorkflow" + } + ]; + + // Optional. The list of fields to be retrieved. + google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The maximum number of migration tasks to return. The service may return + // fewer than this number. + int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token, received from previous `ListMigrationSubtasks` call. + // Provide this to retrieve the subsequent page. + // + // When paginating, all other parameters provided to `ListMigrationSubtasks` + // must match the call that provided the page token. + string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The filter to apply. This can be used to get the subtasks of a specific + // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + // task ID (not the name in the named map). + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response object for a `ListMigrationSubtasks` call. +message ListMigrationSubtasksResponse { + // The migration subtasks for the specified task. + repeated MigrationSubtask migration_subtasks = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto new file mode 100644 index 0000000..bf4b27e --- /dev/null +++ b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto @@ -0,0 +1,207 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.bigquery.migration.v2alpha; + +option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; +option java_multiple_files = true; +option java_outer_classname = "TranslationTaskProto"; +option java_package = "com.google.cloud.bigquery.migration.v2alpha"; +option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; + +// Mapping between an input and output file to be translated in a subtask. +message TranslationFileMapping { + // The Cloud Storage path for a file to translation in a subtask. + string input_path = 1; + + // The Cloud Storage path to write back the corresponding input file to. + string output_path = 2; +} + +// The translation task config to capture necessary settings for a translation +// task and subtask. +message TranslationTaskDetails { + // The file encoding types. + enum FileEncoding { + // File encoding setting is not specified. + FILE_ENCODING_UNSPECIFIED = 0; + + // File encoding is UTF_8. + UTF_8 = 1; + + // File encoding is ISO_8859_1. + ISO_8859_1 = 2; + + // File encoding is US_ASCII. + US_ASCII = 3; + + // File encoding is UTF_16. + UTF_16 = 4; + + // File encoding is UTF_16LE. + UTF_16LE = 5; + + // File encoding is UTF_16BE. + UTF_16BE = 6; + } + + // The special token data type. + enum TokenType { + // Token type is not specified. + TOKEN_TYPE_UNSPECIFIED = 0; + + // Token type as string. + STRING = 1; + + // Token type as integer. + INT64 = 2; + + // Token type as numeric. + NUMERIC = 3; + + // Token type as boolean. + BOOL = 4; + + // Token type as float. + FLOAT64 = 5; + + // Token type as date. + DATE = 6; + + // Token type as timestamp. + TIMESTAMP = 7; + } + + // The language specific settings for the translation task. + oneof language_options { + // The Teradata SQL specific settings for the translation task. + TeradataOptions teradata_options = 10; + + // The BTEQ specific settings for the translation task. + BteqOptions bteq_options = 11; + } + + // The Cloud Storage path for translation input files. + string input_path = 1; + + // The Cloud Storage path for translation output files. + string output_path = 2; + + // Cloud Storage files to be processed for translation. + repeated TranslationFileMapping file_paths = 12; + + // The Cloud Storage path to DDL files as table schema to assist semantic + // translation. + string schema_path = 3; + + // The file encoding type. + FileEncoding file_encoding = 4; + + // The settings for SQL identifiers. + IdentifierSettings identifier_settings = 5; + + // The map capturing special tokens to be replaced during translation. The key + // is special token in string. The value is the token data type. This is used + // to translate SQL query template which contains special token as place + // holder. The special token makes a query invalid to parse. This map will be + // applied to annotate those special token with types to let parser understand + // how to parse them into proper structure with type information. + map special_token_map = 6; + + // The filter applied to translation details. + Filter filter = 7; + + // Specifies the exact name of the bigquery table ("dataset.table") to be used + // for surfacing raw translation errors. If the table does not exist, we will + // create it. If it already exists and the schema is the same, we will re-use. + // If the table exists and the schema is different, we will throw an error. 
+ string translation_exception_table = 13; +} + +// The filter applied to fields of translation details. +message Filter { + // The list of prefixes used to exclude processing for input files. + repeated string input_file_exclusion_prefixes = 1; +} + +// Settings related to SQL identifiers. +message IdentifierSettings { + // The identifier case type. + enum IdentifierCase { + // The identifier case is not specified. + IDENTIFIER_CASE_UNSPECIFIED = 0; + + // Identifiers' cases will be kept as the original cases. + ORIGINAL = 1; + + // Identifiers will be in upper case. + UPPER = 2; + + // Identifiers will be in lower case. + LOWER = 3; + } + + // The SQL identifier rewrite mode. + enum IdentifierRewriteMode { + // SQL Identifier rewrite mode is unspecified. + IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; + + // SQL identifiers won't be rewritten. + NONE = 1; + + // All SQL identifiers will be rewritten. + REWRITE_ALL = 2; + } + + // The setting to control output queries' identifier case. + IdentifierCase output_identifier_case = 1; + + // Specifies the rewrite mode for SQL identifiers. + IdentifierRewriteMode identifier_rewrite_mode = 2; +} + +// Teradata SQL specific translation task related settings. +message TeradataOptions { + +} + +// BTEQ translation task related settings. +message BteqOptions { + // Specifies the project and dataset in BigQuery that will be used for + // external table creation during the translation. + DatasetReference project_dataset = 1; + + // The Cloud Storage location to be used as the default path for files that + // are not otherwise specified in the file replacement map. + string default_path_uri = 2; + + // Maps the local paths that are used in BTEQ scripts (the keys) to the paths + // in Cloud Storage that should be used in their stead in the translation (the + // value). + map<string, string> file_replacement_map = 3; +} + +// Reference to a BigQuery dataset. +message DatasetReference { + // A unique ID for this dataset, without the project name. The ID + // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). + // The maximum length is 1,024 characters. + string dataset_id = 1; + + // The ID of the project containing this dataset. + string project_id = 2; +} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js new file mode 100644 index 0000000..d8613fe --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js @@ -0,0 +1,67 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten.
** + + + +'use strict'; + +function main(parent, migrationWorkflow) { + // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + */ + // const parent = 'abc123' + /** + * Required. The migration workflow to create. + */ + // const migrationWorkflow = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callCreateMigrationWorkflow() { + // Construct request + const request = { + parent, + migrationWorkflow, + }; + + // Run request + const response = await migrationClient.createMigrationWorkflow(request); + console.log(response); + } + + callCreateMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js new file mode 100644 index 0000000..e17c7ee --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callDeleteMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.deleteMigrationWorkflow(request); + console.log(response); + } + + callDeleteMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js new file mode 100644 index 0000000..a17add1 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + */ + // const name = 'abc123' + /** + * Optional. The list of fields to be retrieved. 
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationSubtask() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationSubtask(request); + console.log(response); + } + + callGetMigrationSubtask(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js new file mode 100644 index 0000000..1519196 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js @@ -0,0 +1,66 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + /** + * The list of fields to be retrieved. 
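+ * For example, a field mask can be passed as an object with a `paths` array, + * such as `{paths: ['display_name', 'state']}` (field names taken from the + * MigrationWorkflow message), to return only those fields; leaving it unset + * returns the full resource.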
+ */ + // const readMask = {} + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callGetMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.getMigrationWorkflow(request); + console.log(response); + } + + callGetMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js new file mode 100644 index 0000000..99419dd --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js @@ -0,0 +1,86 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + */ + // const parent = 'abc123' + /** + * Optional. The list of fields to be retrieved. + */ + // const readMask = {} + /** + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + */ + // const pageToken = 'abc123' + /** + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). 
+ */ + // const filter = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationSubtasks() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationSubtasksAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationSubtasks(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js new file mode 100644 index 0000000..31c0263 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js @@ -0,0 +1,80 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + */ + // const parent = 'abc123' + /** + * The list of fields to be retrieved. + */ + // const readMask = {} + /** + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + */ + // const pageSize = 1234 + /** + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. 
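+ * Note: the `listMigrationWorkflowsAsync` iterable used below pages through + * results automatically, so `pageToken` usually only needs to be set when + * calling `listMigrationWorkflows` directly and handling pages manually.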
+ */ + // const pageToken = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callListMigrationWorkflows() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await migrationClient.listMigrationWorkflowsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListMigrationWorkflows(); + // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js new file mode 100644 index 0000000..85288c6 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js @@ -0,0 +1,62 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] + /** + * This snippet has been automatically generated and should be regarded as a code template only. + * It will require modifications to work. + * It may require correct/in-range values for request initialization. + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The unique identifier for the migration workflow. 
+ * Example: `projects/123/locations/us/workflows/1234` + */ + // const name = 'abc123' + + // Imports the Migration library + const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; + + // Instantiates a client + const migrationClient = new MigrationServiceClient(); + + async function callStartMigrationWorkflow() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await migrationClient.startMigrationWorkflow(request); + console.log(response); + } + + callStartMigrationWorkflow(); + // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json new file mode 100644 index 0000000..c5336c2 --- /dev/null +++ b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json @@ -0,0 +1,335 @@ +{ + "clientLibrary": { + "name": "nodejs-migration", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.bigquery.migration.v2alpha", + "version": "v2alpha" + } + ] + }, + "snippets": [ + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", + "title": "MigrationService createMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Creates a migration workflow.", + "canonical": true, + "file": "migration_service.create_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 59, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "migration_workflow", + "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "CreateMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", + "title": "MigrationService getMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration workflow.", + "canonical": true, + "file": "migration_service.get_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", + "title": "MigrationService listMigrationWorkflows Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration workflow.", + "canonical": true, + "file": "migration_service.list_migration_workflows.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 72, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationWorkflows", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", + "title": "MigrationService deleteMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Deletes a migration workflow by name.", + "canonical": true, + "file": "migration_service.delete_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "DeleteMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", + "title": "MigrationService startMigrationWorkflow Sample", + "origin": "API_DEFINITION", + "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", + "canonical": true, + "file": "migration_service.start_migration_workflow.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 54, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "StartMigrationWorkflow", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", + "title": "MigrationService getMigrationSubtask Sample", + "origin": "API_DEFINITION", + "description": " Gets a previously created migration subtask.", + "canonical": true, + "file": "migration_service.get_migration_subtask.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 58, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "GetMigrationSubtask", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", + "service": { + "shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + }, + { + "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", + "title": "MigrationService listMigrationSubtasks Sample", + "origin": "API_DEFINITION", + "description": " Lists previously created migration subtasks.", + "canonical": true, + "file": "migration_service.list_migration_subtasks.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 78, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "read_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", + "client": { + "shortName": "MigrationServiceClient", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" + }, + "method": { + "shortName": "ListMigrationSubtasks", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", + "service": { + 
"shortName": "MigrationService", + "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts new file mode 100644 index 0000000..288e629 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/index.ts @@ -0,0 +1,25 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as v2alpha from './v2alpha'; +const MigrationServiceClient = v2alpha.MigrationServiceClient; +type MigrationServiceClient = v2alpha.MigrationServiceClient; +export {v2alpha, MigrationServiceClient}; +export default {v2alpha, MigrationServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json new file mode 100644 index 0000000..adf8d06 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json @@ -0,0 +1,101 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.bigquery.migration.v2alpha", + "libraryPackage": "@google-cloud/bigquery-migration", + "services": { + "MigrationService": { + "clients": { + "grpc": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + "listMigrationSubtasksAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "MigrationServiceClient", + "rpcs": { + "CreateMigrationWorkflow": { + "methods": [ + "createMigrationWorkflow" + ] + }, + "GetMigrationWorkflow": { + "methods": [ + "getMigrationWorkflow" + ] + }, + "DeleteMigrationWorkflow": { + "methods": [ + "deleteMigrationWorkflow" + ] + }, + "StartMigrationWorkflow": { + "methods": [ + "startMigrationWorkflow" + ] + }, + "GetMigrationSubtask": { + "methods": [ + "getMigrationSubtask" + ] + }, + "ListMigrationWorkflows": { + "methods": [ + "listMigrationWorkflows", + "listMigrationWorkflowsStream", + "listMigrationWorkflowsAsync" + ] + }, + "ListMigrationSubtasks": { + "methods": [ + "listMigrationSubtasks", + "listMigrationSubtasksStream", + 
"listMigrationSubtasksAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts new file mode 100644 index 0000000..f75d208 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/index.ts @@ -0,0 +1,19 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts new file mode 100644 index 0000000..d5105f9 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import type * as gax from 'google-gax'; +import type {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; +import {Transform} from 'stream'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v2alpha/migration_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './migration_service_client_config.json'; +const version = require('../../../package.json').version; + +/** + * Service to handle EDW migrations. 
+ * @class + * @memberof v2alpha + */ +export class MigrationServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + migrationServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of MigrationServiceClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. + * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. + * For more information, please check the + * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. + * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you + * need to avoid loading the default gRPC version and want to use the fallback + * HTTP implementation. Load only fallback version and pass it to the constructor: + * ``` + * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC + * const client = new MigrationServiceClient({fallback: 'rest'}, gax); + * ``` + */ + constructor(opts?: ClientOptions, gaxInstance?: typeof gax | typeof gax.fallback) { + // Ensure that options include all the required fields. 
+ const staticMembers = this.constructor as typeof MigrationServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Load google-gax module synchronously if needed + if (!gaxInstance) { + gaxInstance = require('google-gax') as typeof gax; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' + ), + migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflows/{workflow}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. 
+ this.descriptors.page = { + listMigrationWorkflows: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), + listMigrationSubtasks: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = this._gaxModule.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.migrationServiceStub) { + return this.migrationServiceStub; + } + + // Put together the "service stub" for + // google.cloud.bigquery.migration.v2alpha.MigrationService. + this.migrationServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const migrationServiceStubMethods = + ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; + for (const methodName of migrationServiceStubMethods) { + const callPromise = this.migrationServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor, + this._opts.fallback + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.migrationServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. 
+ */ + static get apiEndpoint() { + return 'bigquerymigration.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise<string>; + getProjectId(callback: Callback<string, undefined, undefined>): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback<string, undefined, undefined>): + Promise<string>|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates a migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The name of the project to which this migration workflow belongs. + * Example: `projects/foo/locations/bar` + * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow + * Required. The migration workflow to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples.
+ * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async + */ + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + createMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async + */ + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + getMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationWorkflow(request, options, callback); + } +/** + * Deletes a migration workflow by name. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async + */ + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + deleteMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); + } +/** + * Starts a previously created migration workflow. I.e., the state transitions + * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. + * An error will be signaled if the state is anything other than DRAFT or + * RUNNING. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration workflow. + * Example: `projects/123/locations/us/workflows/1234` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async + */ + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): void; + startMigrationWorkflow( + request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.startMigrationWorkflow(request, options, callback); + } +/** + * Gets a previously created migration subtask. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The unique identifier for the migration subtask. + * Example: `projects/123/locations/us/workflows/1234/subtasks/543` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async + */ + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + callback: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): void; + getMigrationSubtask( + request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, + protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getMigrationSubtask(request, options, callback); + } + + /** + * Lists previously created migration workflow. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; + listMigrationWorkflows( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationWorkflows(request, options, callback); + } + +/** + * Equivalent to 
`method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationWorkflowsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationWorkflowsStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.createStream( + this.innerApiCalls.listMigrationWorkflows as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationWorkflows`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The project and location of the migration workflows to list. + * Example: `projects/123/locations/us` + * @param {google.protobuf.FieldMask} request.readMask + * The list of fields to be retrieved. + * @param {number} request.pageSize + * The maximum number of migration workflows to return. The service may return + * fewer than this number. + * @param {string} request.pageToken + * A page token, received from previous `ListMigrationWorkflows` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationWorkflows` + * must match the call that provided the page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async + */ + listMigrationWorkflowsAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, + options?: CallOptions): + AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationWorkflows']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationWorkflows.asyncIterate( + this.innerApiCalls['listMigrationWorkflows'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>; + } + /** + * Lists previously created migration subtasks. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed.
+ * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; + listMigrationSubtasks( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, + callback?: PaginationCallback< + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): + Promise<[ + protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, + protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listMigrationSubtasks(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. 
+ * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listMigrationSubtasksAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listMigrationSubtasksStream( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.createStream( + this.innerApiCalls.listMigrationSubtasks as GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listMigrationSubtasks`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The migration task of the subtasks to list. + * Example: `projects/123/locations/us/workflows/1234` + * @param {google.protobuf.FieldMask} [request.readMask] + * Optional. The list of fields to be retrieved. + * @param {number} [request.pageSize] + * Optional. The maximum number of migration tasks to return. The service may return + * fewer than this number. + * @param {string} [request.pageToken] + * Optional. A page token, received from previous `ListMigrationSubtasks` call. + * Provide this to retrieve the subsequent page. + * + * When paginating, all other parameters provided to `ListMigrationSubtasks` + * must match the call that provided the page token. + * @param {string} [request.filter] + * Optional. The filter to apply. This can be used to get the subtasks of a specific + * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the + * task ID (not the name in the named map). + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js + * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async + */ + listMigrationSubtasksAsync( + request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, + options?: CallOptions): + AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = this._gaxModule.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listMigrationSubtasks']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listMigrationSubtasks.asyncIterate( + this.innerApiCalls['listMigrationSubtasks'] as GaxCall, + request as {}, + callSettings + ) as AsyncIterable<protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified migrationSubtask resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @param {string} subtask + * @returns {string} Resource name string. + */ + migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { + return this.pathTemplates.migrationSubtaskPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + subtask: subtask, + }); + } + + /** + * Parse the project from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the project.
+ */ + matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; + } + + /** + * Parse the location from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; + } + + /** + * Parse the workflow from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; + } + + /** + * Parse the subtask from MigrationSubtask resource. + * + * @param {string} migrationSubtaskName + * A fully-qualified path representing MigrationSubtask resource. + * @returns {string} A string representing the subtask. + */ + matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { + return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; + } + + /** + * Return a fully-qualified migrationWorkflow resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow + * @returns {string} Resource name string. + */ + migrationWorkflowPath(project:string,location:string,workflow:string) { + return this.pathTemplates.migrationWorkflowPathTemplate.render({ + project: project, + location: location, + workflow: workflow, + }); + } + + /** + * Parse the project from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the project. + */ + matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; + } + + /** + * Parse the location from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the location. + */ + matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; + } + + /** + * Parse the workflow from MigrationWorkflow resource. + * + * @param {string} migrationWorkflowName + * A fully-qualified path representing MigrationWorkflow resource. + * @returns {string} A string representing the workflow. + */ + matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { + return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
+ */ + close(): Promise<void> { + if (this.migrationServiceStub && !this._terminated) { + return this.migrationServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json new file mode 100644 index 0000000..2184b83 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json @@ -0,0 +1,73 @@ +{ + "interfaces": { + "google.cloud.bigquery.migration.v2alpha.MigrationService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { + "initial_retry_delay_millis": 1000, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 10000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationWorkflows": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "DeleteMigrationWorkflow": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StartMigrationWorkflow": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "GetMigrationSubtask": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + }, + "ListMigrationSubtasks": { + "timeout_millis": 120000, + "retry_codes_name": "unavailable", + "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" + } + } + } + } +} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json new file mode 100644 index 0000000..8e91e42 --- /dev/null +++ b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json @@ -0,0 +1,8 @@ +[ + "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", + "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" +] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js new file mode 100644 index 0000000..ecc7e4b --- /dev/null +++ 
b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js @@ -0,0 +1,27 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const migration = require('@google-cloud/bigquery-migration'); + +function main() { + const migrationServiceClient = new migration.MigrationServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts new file mode 100644 index 0000000..80fbe2d --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,32 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; + +// check that the client class type name can be used +function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const migrationServiceClient = new MigrationServiceClient(); + doStuffWithMigrationServiceClient(migrationServiceClient); +} + +main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts new file mode 100644 index 0000000..557a575 --- /dev/null +++ b/owl-bot-staging/v2alpha/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import {packNTest} from 'pack-n-play'; +import {readFileSync} from 'fs'; +import {describe, it} from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts new file mode 100644 index 0000000..99aac57 --- /dev/null +++ b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts @@ -0,0 +1,1256 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import {describe, it} from 'mocha'; +import * as migrationserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage<T extends object>(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall<ResponseType>(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback<ResponseType>(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall<ResponseType>(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall<ResponseType>(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v2alpha.MigrationServiceClient', () => { + describe('Common methods', () => { + it('has servicePath', () => { + const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + await client.initialize(); + assert(client.migrationServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.migrationServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.migrationServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + 
const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + }); + + describe('createMigrationWorkflow', () => { + it('invokes createMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.createMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + 
}; + const expectedError = new Error('expected'); + client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationWorkflow', () => { + it('invokes getMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 
'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationWorkflow(request), expectedError); + }); + }); + + describe('deleteMigrationWorkflow', () => { + it('invokes deleteMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.deleteMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteMigrationWorkflow( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + 
.getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); + }); + }); + + describe('startMigrationWorkflow', () => { + it('invokes startMigrationWorkflow without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); + const [response] = await client.startMigrationWorkflow(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startMigrationWorkflow( + request, + (err?: 
Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startMigrationWorkflow with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startMigrationWorkflow with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.startMigrationWorkflow(request), expectedError); + }); + }); + + describe('getMigrationSubtask', () => { + it('invokes getMigrationSubtask without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); + const [response] = await client.getMigrationSubtask(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, 
+ }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); + client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getMigrationSubtask( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getMigrationSubtask with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + assert((client.innerApiCalls.getMigrationSubtask as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getMigrationSubtask with closed client', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getMigrationSubtask(request), expectedError); + }); + }); + + describe('listMigrationWorkflows', () => { + it('invokes listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationWorkflows(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes 
listMigrationWorkflows without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationWorkflows( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationWorkflows(request), expectedError); + assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationWorkflowsStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] 
= []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listMigrationWorkflowsStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationWorkflowsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), + ]; + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + const iterable = client.listMigrationWorkflowsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], 
request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationWorkflows with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationWorkflowsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('listMigrationSubtasks', () => { + it('invokes listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); + const [response] = await client.listMigrationSubtasks(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasks without error using callback', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listMigrationSubtasks( + request, + (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listMigrationSubtasks(request), expectedError); + assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listMigrationSubtasksStream without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes 
listMigrationSubtasksStream with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listMigrationSubtasksStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; + stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks without error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), + ]; + client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + const iterable = client.listMigrationSubtasksAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listMigrationSubtasks with error', async () => { + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + 
client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listMigrationSubtasksAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationSubtask', () => { + const fakePath = "/rendered/path/migrationSubtask"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + subtask: "subtaskValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationSubtaskPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationSubtaskPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationSubtaskPath', () => { + const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationSubtaskName', () => { + const result = client.matchProjectFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationSubtaskName', () => { + const result = client.matchLocationFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, 
"locationValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationSubtaskName', () => { + const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchSubtaskFromMigrationSubtaskName', () => { + const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); + assert.strictEqual(result, "subtaskValue"); + assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('migrationWorkflow', () => { + const fakePath = "/rendered/path/migrationWorkflow"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow: "workflowValue", + }; + const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.migrationWorkflowPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.migrationWorkflowPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('migrationWorkflowPath', () => { + const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromMigrationWorkflowName', () => { + const result = client.matchProjectFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromMigrationWorkflowName', () => { + const result = client.matchLocationFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowFromMigrationWorkflowName', () => { + const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); + assert.strictEqual(result, "workflowValue"); + assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json new file mode 100644 index 0000000..c78f1c8 --- /dev/null +++ b/owl-bot-staging/v2alpha/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v2alpha/webpack.config.js b/owl-bot-staging/v2alpha/webpack.config.js new file mode 100644 index 0000000..9178c4b --- /dev/null +++ b/owl-bot-staging/v2alpha/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'MigrationService', + filename: './migration-service.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; From 1013d2fa80af7b092a90144f9739cb20590829e7 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 30 Aug 2022 10:28:02 +0000 Subject: [PATCH 13/13] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- owl-bot-staging/v2/.eslintignore | 7 - owl-bot-staging/v2/.eslintrc.json | 3 - owl-bot-staging/v2/.gitignore | 14 - owl-bot-staging/v2/.jsdoc.js | 55 - owl-bot-staging/v2/.mocharc.js | 33 - owl-bot-staging/v2/.prettierrc.js | 22 - owl-bot-staging/v2/README.md | 1 - owl-bot-staging/v2/linkinator.config.json | 16 - owl-bot-staging/v2/package.json | 64 - .../migration/v2/migration_entities.proto | 233 --- .../v2/migration_error_details.proto | 62 - .../migration/v2/migration_metrics.proto | 111 -- .../migration/v2/migration_service.proto | 245 ---- .../migration/v2/translation_config.proto | 257 ---- ...ation_service.create_migration_workflow.js | 67 - ...ation_service.delete_migration_workflow.js | 62 - ...migration_service.get_migration_subtask.js | 66 - ...igration_service.get_migration_workflow.js | 66 - ...gration_service.list_migration_subtasks.js | 86 -- ...ration_service.list_migration_workflows.js | 80 -- ...ration_service.start_migration_workflow.js | 62 - ...ta.google.cloud.bigquery.migration.v2.json | 335 ----- owl-bot-staging/v2/src/index.ts | 25 - owl-bot-staging/v2/src/v2/gapic_metadata.json | 101 -- owl-bot-staging/v2/src/v2/index.ts | 19 - .../v2/src/v2/migration_service_client.ts | 1256 ----------------- .../v2/migration_service_client_config.json | 71 - .../src/v2/migration_service_proto_list.json | 7 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - owl-bot-staging/v2/system-test/install.ts | 49 - .../v2/test/gapic_migration_service_v2.ts | 1256 ----------------- owl-bot-staging/v2/tsconfig.json | 19 - owl-bot-staging/v2/webpack.config.js | 64 - owl-bot-staging/v2alpha/.eslintignore | 7 - owl-bot-staging/v2alpha/.eslintrc.json | 3 - owl-bot-staging/v2alpha/.gitignore | 14 - owl-bot-staging/v2alpha/.jsdoc.js | 55 - 
owl-bot-staging/v2alpha/.mocharc.js | 33 - owl-bot-staging/v2alpha/.prettierrc.js | 22 - owl-bot-staging/v2alpha/README.md | 1 - .../v2alpha/linkinator.config.json | 16 - owl-bot-staging/v2alpha/package.json | 64 - .../migration/v2alpha/assessment_task.proto | 49 - .../v2alpha/migration_entities.proto | 244 ---- .../v2alpha/migration_error_details.proto | 62 - .../migration/v2alpha/migration_metrics.proto | 111 -- .../migration/v2alpha/migration_service.proto | 247 ---- .../migration/v2alpha/translation_task.proto | 207 --- ...ation_service.create_migration_workflow.js | 67 - ...ation_service.delete_migration_workflow.js | 62 - ...migration_service.get_migration_subtask.js | 66 - ...igration_service.get_migration_workflow.js | 66 - ...gration_service.list_migration_subtasks.js | 86 -- ...ration_service.list_migration_workflows.js | 80 -- ...ration_service.start_migration_workflow.js | 62 - ...ogle.cloud.bigquery.migration.v2alpha.json | 335 ----- owl-bot-staging/v2alpha/src/index.ts | 25 - .../v2alpha/src/v2alpha/gapic_metadata.json | 101 -- owl-bot-staging/v2alpha/src/v2alpha/index.ts | 19 - .../src/v2alpha/migration_service_client.ts | 1256 ----------------- .../migration_service_client_config.json | 73 - .../v2alpha/migration_service_proto_list.json | 8 - .../system-test/fixtures/sample/src/index.js | 27 - .../system-test/fixtures/sample/src/index.ts | 32 - .../v2alpha/system-test/install.ts | 49 - .../test/gapic_migration_service_v2alpha.ts | 1256 ----------------- owl-bot-staging/v2alpha/tsconfig.json | 19 - owl-bot-staging/v2alpha/webpack.config.js | 64 - 69 files changed, 9761 deletions(-) delete mode 100644 owl-bot-staging/v2/.eslintignore delete mode 100644 owl-bot-staging/v2/.eslintrc.json delete mode 100644 owl-bot-staging/v2/.gitignore delete mode 100644 owl-bot-staging/v2/.jsdoc.js delete mode 100644 owl-bot-staging/v2/.mocharc.js delete mode 100644 owl-bot-staging/v2/.prettierrc.js delete mode 100644 owl-bot-staging/v2/README.md delete mode 100644 owl-bot-staging/v2/linkinator.config.json delete mode 100644 owl-bot-staging/v2/package.json delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto delete mode 100644 owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json delete mode 100644 owl-bot-staging/v2/src/index.ts delete mode 100644 
owl-bot-staging/v2/src/v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/src/v2/index.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client.ts delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2/src/v2/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v2/system-test/install.ts delete mode 100644 owl-bot-staging/v2/test/gapic_migration_service_v2.ts delete mode 100644 owl-bot-staging/v2/tsconfig.json delete mode 100644 owl-bot-staging/v2/webpack.config.js delete mode 100644 owl-bot-staging/v2alpha/.eslintignore delete mode 100644 owl-bot-staging/v2alpha/.eslintrc.json delete mode 100644 owl-bot-staging/v2alpha/.gitignore delete mode 100644 owl-bot-staging/v2alpha/.jsdoc.js delete mode 100644 owl-bot-staging/v2alpha/.mocharc.js delete mode 100644 owl-bot-staging/v2alpha/.prettierrc.js delete mode 100644 owl-bot-staging/v2alpha/README.md delete mode 100644 owl-bot-staging/v2alpha/linkinator.config.json delete mode 100644 owl-bot-staging/v2alpha/package.json delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto delete mode 100644 owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js delete mode 100644 owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json delete mode 100644 owl-bot-staging/v2alpha/src/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/index.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json delete mode 100644 owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts delete mode 100644 
owl-bot-staging/v2alpha/system-test/install.ts delete mode 100644 owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts delete mode 100644 owl-bot-staging/v2alpha/tsconfig.json delete mode 100644 owl-bot-staging/v2alpha/webpack.config.js diff --git a/owl-bot-staging/v2/.eslintignore b/owl-bot-staging/v2/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2/.eslintrc.json b/owl-bot-staging/v2/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2/.gitignore b/owl-bot-staging/v2/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2/.jsdoc.js b/owl-bot-staging/v2/.jsdoc.js deleted file mode 100644 index c3c1e3d..0000000 --- a/owl-bot-staging/v2/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/bigquery-migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2/.mocharc.js b/owl-bot-staging/v2/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2/.prettierrc.js b/owl-bot-staging/v2/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2/README.md b/owl-bot-staging/v2/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2/linkinator.config.json b/owl-bot-staging/v2/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2/package.json b/owl-bot-staging/v2/package.json deleted file mode 100644 index 24ced44..0000000 --- a/owl-bot-staging/v2/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/bigquery-migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . 
&& cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.3.1" - }, - "devDependencies": { - "@types/mocha": "^9.1.1", - "@types/node": "^16.11.56", - "@types/sinon": "^10.0.13", - "c8": "^7.12.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.11", - "jsdoc-fresh": "^2.0.1", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.2", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^8.4.0", - "typescript": "^4.8.2", - "webpack": "^4.46.0", - "webpack-cli": "^4.10.0" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto deleted file mode 100644 index 7d77bae..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_entities.proto +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2/translation_config.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. 
forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. - // The ID is server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. - FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Batch SQL Translation. - TranslationConfigDetails translation_config_details = 14; - } - - // Output only. Immutable. The unique identifier for the migration task. The - // ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be one of the supported task types: - // Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ, - // Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ, - // Translation_Snowflake2BQ, Translation_Netezza2BQ, - // Translation_AzureSynapse2BQ, Translation_Vertica2BQ, - // Translation_SQLServer2BQ, Translation_Presto2BQ, Translation_MySQL2BQ. - string type = 2; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 5 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. 
- enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID - // is server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in - // FAILED state. - google.rpc.ErrorInfo processing_error = 6 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Provides details to errors and issues encountered while - // processing the subtask. Presence of error details does not mean that the - // subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 - [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto deleted file mode 100644 index 199e2db..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
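To make the subtask contract above concrete, here is a minimal sketch (not part of the generated sources) of reading a MigrationSubtask through the v2 Node.js client and applying the resource_error_count truncation rule documented on the message; the subtask name and the use of the published @google-cloud/bigquery-migration package are assumptions for illustration only.

// Sketch only: assumes the published @google-cloud/bigquery-migration package.
import {v2} from '@google-cloud/bigquery-migration';

async function inspectSubtask(): Promise<void> {
  const client = new v2.MigrationServiceClient();

  // Placeholder name following
  // projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}.
  const name = 'projects/123/locations/us/workflows/345/subtasks/678';

  const [subtask] = await client.getMigrationSubtask({name});
  console.log(`subtask state: ${subtask.state}`);

  // Per the proto comments, error details may be truncated: the reported count
  // can be higher than the number of entries actually returned.
  const details = subtask.resourceErrorDetails ?? [];
  const count = subtask.resourceErrorCount ?? details.length;
  if (count > details.length) {
    console.log(`only ${details.length} of ${count} resource errors returned`);
  }
}

inspectSubtask().catch(err => {
  console.error(err);
  process.exitCode = 1;
});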
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. - int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto deleted file mode 100644 index e52fead..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
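As a rough companion to the error-detail messages above, the following illustrative helper (again not part of the generated sources) renders ResourceErrorDetail entries using the conventions documented in the proto: a zero line or column means no location information, and an error_count larger than the returned error_details signals truncation. It assumes the package's standard generated protos export for the TypeScript typings.

// Sketch only: formats ResourceErrorDetail messages as surfaced by the
// generated client (camelCase field names, all fields optional).
import {protos} from '@google-cloud/bigquery-migration';

type IResourceErrorDetail =
  protos.google.cloud.bigquery.migration.v2.IResourceErrorDetail;

function formatResourceErrors(resourceErrors: IResourceErrorDetail[]): string[] {
  const messages: string[] = [];
  for (const resourceError of resourceErrors) {
    const resource =
      resourceError.resourceInfo?.resourceName ?? '(unknown resource)';
    const details = resourceError.errorDetails ?? [];
    for (const detail of details) {
      // Zero line/column means location information is not available.
      const line = detail.location?.line ?? 0;
      const column = detail.location?.column ?? 0;
      const where =
        line > 0 ? (column > 0 ? `:${line}:${column}` : `:${line}`) : '';
      const reason = detail.errorInfo?.reason ?? 'UNKNOWN';
      messages.push(`${resource}${where} ${reason}`);
    }
    // An error_count above the number of returned details signals truncation.
    const total = resourceError.errorCount ?? details.length;
    if (total > details.length) {
      messages.push(
        `${resource}: ${total - details.length} additional error(s) not returned`
      );
    }
  }
  return messages;
}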
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. 
- oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately `+/-9.2x10^18`. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately `+/-10^(+/-300)` and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto deleted file mode 100644 index 3c1a89e..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/migration_service.proto +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2/migration_entities.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// Service to handle EDW migrations. -service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. 
- rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. - rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. 
- // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. - // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. 
- string next_page_token = 2; -} diff --git a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto b/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto deleted file mode 100644 index 994140d..0000000 --- a/owl-bot-staging/v2/protos/google/cloud/bigquery/migration/v2/translation_config.proto +++ /dev/null @@ -1,257 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationConfigProto"; -option java_package = "com.google.cloud.bigquery.migration.v2"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2"; - -// The translation config to capture necessary settings for a translation task -// and subtask. -message TranslationConfigDetails { - // The chosen path where the source for input files will be found. - oneof source_location { - // The Cloud Storage path for a directory of files to translate in a task. - string gcs_source_path = 1; - } - - // The chosen path where the destination for output files will be found. - oneof target_location { - // The Cloud Storage path to write back the corresponding input files to. - string gcs_target_path = 2; - } - - // The dialect of the input files. - Dialect source_dialect = 3; - - // The target dialect for the engine to translate the input to. - Dialect target_dialect = 4; - - // The mapping of full SQL object names from their current state to the - // desired output. - oneof output_name_mapping { - // The mapping of objects to their desired output names in list form. - ObjectNameMappingList name_mapping_list = 5; - } - - // The default source environment values for the translation. - SourceEnv source_env = 6; -} - -// The possible dialect options for translation. -message Dialect { - // The possible dialect options that this message represents. 
- oneof dialect_value { - // The BigQuery dialect - BigQueryDialect bigquery_dialect = 1; - - // The HiveQL dialect - HiveQLDialect hiveql_dialect = 2; - - // The Redshift dialect - RedshiftDialect redshift_dialect = 3; - - // The Teradata dialect - TeradataDialect teradata_dialect = 4; - - // The Oracle dialect - OracleDialect oracle_dialect = 5; - - // The SparkSQL dialect - SparkSQLDialect sparksql_dialect = 6; - - // The Snowflake dialect - SnowflakeDialect snowflake_dialect = 7; - - // The Netezza dialect - NetezzaDialect netezza_dialect = 8; - - // The Azure Synapse dialect - AzureSynapseDialect azure_synapse_dialect = 9; - - // The Vertica dialect - VerticaDialect vertica_dialect = 10; - - // The SQL Server dialect - SQLServerDialect sql_server_dialect = 11; - - // The Postgresql dialect - PostgresqlDialect postgresql_dialect = 12; - - // The Presto dialect - PrestoDialect presto_dialect = 13; - - // The MySQL dialect - MySQLDialect mysql_dialect = 14; - } -} - -// The dialect definition for BigQuery. -message BigQueryDialect {} - -// The dialect definition for HiveQL. -message HiveQLDialect {} - -// The dialect definition for Redshift. -message RedshiftDialect {} - -// The dialect definition for Teradata. -message TeradataDialect { - // The sub-dialect options for Teradata. - enum Mode { - // Unspecified mode. - MODE_UNSPECIFIED = 0; - - // Teradata SQL mode. - SQL = 1; - - // BTEQ mode (which includes SQL). - BTEQ = 2; - } - - // Which Teradata sub-dialect mode the user specifies. - Mode mode = 1; -} - -// The dialect definition for Oracle. -message OracleDialect {} - -// The dialect definition for SparkSQL. -message SparkSQLDialect {} - -// The dialect definition for Snowflake. -message SnowflakeDialect {} - -// The dialect definition for Netezza. -message NetezzaDialect {} - -// The dialect definition for Azure Synapse. -message AzureSynapseDialect {} - -// The dialect definition for Vertica. -message VerticaDialect {} - -// The dialect definition for SQL Server. -message SQLServerDialect {} - -// The dialect definition for Postgresql. -message PostgresqlDialect {} - -// The dialect definition for Presto. -message PrestoDialect {} - -// The dialect definition for MySQL. -message MySQLDialect {} - -// Represents a map of name mappings using a list of key:value proto messages of -// existing name to desired output name. -message ObjectNameMappingList { - // The elements of the object name map. - repeated ObjectNameMapping name_map = 1; -} - -// Represents a key-value pair of NameMappingKey to NameMappingValue to -// represent the mapping of SQL names from the input value to desired output. -message ObjectNameMapping { - // The name of the object in source that is being mapped. - NameMappingKey source = 1; - - // The desired target name of the object that is being mapped. - NameMappingValue target = 2; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the source data warehouse. -message NameMappingKey { - // The type of the object that is being mapped. - enum Type { - // Unspecified name mapping type. - TYPE_UNSPECIFIED = 0; - - // The object being mapped is a database. - DATABASE = 1; - - // The object being mapped is a schema. - SCHEMA = 2; - - // The object being mapped is a relation. - RELATION = 3; - - // The object being mapped is an attribute. - ATTRIBUTE = 4; - - // The object being mapped is a relation alias. - RELATION_ALIAS = 5; - - // The object being mapped is a an attribute alias. 
- ATTRIBUTE_ALIAS = 6; - - // The object being mapped is a function. - FUNCTION = 7; - } - - // The type of object that is being mapped. - Type type = 1; - - // The database name (BigQuery project ID equivalent in the source data - // warehouse). - string database = 2; - - // The schema name (BigQuery dataset equivalent in the source data warehouse). - string schema = 3; - - // The relation name (BigQuery table or view equivalent in the source data - // warehouse). - string relation = 4; - - // The attribute name (BigQuery column equivalent in the source data - // warehouse). - string attribute = 5; -} - -// The potential components of a full name mapping that will be mapped -// during translation in the target data warehouse. -message NameMappingValue { - // The database name (BigQuery project ID equivalent in the target data - // warehouse). - string database = 1; - - // The schema name (BigQuery dataset equivalent in the target data warehouse). - string schema = 2; - - // The relation name (BigQuery table or view equivalent in the target data - // warehouse). - string relation = 3; - - // The attribute name (BigQuery column equivalent in the target data - // warehouse). - string attribute = 4; -} - -// Represents the default source environment values for the translation. -message SourceEnv { - // The default database name to fully qualify SQL objects when their database - // name is missing. - string default_database = 1; - - // The schema search path. When SQL objects are missing schema name, - // translation engine will search through this list to find the value. - repeated string schema_search_path = 2; -} diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js deleted file mode 100644 index 8301c3a..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. 
- */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js deleted file mode 100644 index 9f0651e..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js deleted file mode 100644 index 25de9e0..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js deleted file mode 100644 index 52ab5cd..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js deleted file mode 100644 index c5c7ed0..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js deleted file mode 100644 index ebd2127..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js b/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js deleted file mode 100644 index 7f8257d..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json b/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json deleted file mode 100644 index 81ec8bb..0000000 --- a/owl-bot-staging/v2/samples/generated/v2/snippet_metadata.google.cloud.bigquery.migration.v2.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2", - "version": "v2" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 59, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 72, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 78, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "service": { - "shortName": "MigrationService", - "fullName": 
"google.cloud.bigquery.migration.v2.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2/src/index.ts b/owl-bot-staging/v2/src/index.ts deleted file mode 100644 index 35a8fd9..0000000 --- a/owl-bot-staging/v2/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2 from './v2'; -const MigrationServiceClient = v2.MigrationServiceClient; -type MigrationServiceClient = v2.MigrationServiceClient; -export {v2, MigrationServiceClient}; -export default {v2, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2/src/v2/gapic_metadata.json b/owl-bot-staging/v2/src/v2/gapic_metadata.json deleted file mode 100644 index 1b6a33c..0000000 --- a/owl-bot-staging/v2/src/v2/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2", - "libraryPackage": "@google-cloud/bigquery-migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/index.ts 
b/owl-bot-staging/v2/src/v2/index.ts deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2/src/v2/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2/src/v2/migration_service_client.ts b/owl-bot-staging/v2/src/v2/migration_service_client.ts deleted file mode 100644 index cfb3f86..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import type * as gax from 'google-gax'; -import type {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; -import {Transform} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2 - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you - * need to avoid loading the default gRPC version and want to use the fallback - * HTTP implementation. Load only fallback version and pass it to the constructor: - * ``` - * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new MigrationServiceClient({fallback: 'rest'}, gax); - * ``` - */ - constructor(opts?: ClientOptions, gaxInstance?: typeof gax | typeof gax.fallback) { - // Ensure that options include all the required fields. 
- const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Load google-gax module synchronously if needed - if (!gaxInstance) { - gaxInstance = require('google-gax') as typeof gax; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. 
- this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = this._gaxModule.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. 
- */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise<string>; - getProjectId(callback: Callback<string, undefined, undefined>): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback<string, undefined, undefined>): - Promise<string>|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples.
- * @example include:samples/generated/v2/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v2/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v2/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v2/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v2/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. 
- * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
- * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
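The generated docs above repeatedly recommend `listMigrationWorkflowsAsync()` over the auto-paginating call. A minimal consumption sketch of that surface, assuming the published `@google-cloud/bigquery-migration` entry point used by the sample fixtures further below; the project ID and location are placeholders:

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listWorkflows(): Promise<void> {
  const client = new MigrationServiceClient();
  // Placeholder parent; the expected format is documented above as `projects/123/locations/us`.
  const parent = 'projects/my-project/locations/us';
  // Pages are fetched lazily, one API call per page; breaking out of the loop
  // stops further calls, which is why async iteration is quota-friendlier than
  // the auto-paginating listMigrationWorkflows() promise form.
  for await (const workflow of client.listMigrationWorkflowsAsync({parent})) {
    console.log(workflow.name);
  }
}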
- */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. 
- * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). 
- * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2.MigrationSubtask}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. - */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. 
- * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
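A short sketch of how the resource-name helpers above are typically used, assuming the same client class; the project, location, and workflow IDs are placeholders:

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

const client = new MigrationServiceClient();
// Build a fully-qualified MigrationWorkflow resource name, which should render to
// something like 'projects/my-project/locations/us/workflows/workflow-1234'.
const name = client.migrationWorkflowPath('my-project', 'us', 'workflow-1234');
// Parse individual components back out of a fully-qualified resource name.
const workflowId = client.matchWorkflowFromMigrationWorkflowName(name);
const location = client.matchLocationFromMigrationWorkflowName(name);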
- */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_client_config.json b/owl-bot-staging/v2/src/v2/migration_service_client_config.json deleted file mode 100644 index 5832815..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_client_config.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "ListMigrationSubtasks": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json b/owl-bot-staging/v2/src/v2/migration_service_proto_list.json deleted file mode 100644 index 57df7ab..0000000 --- a/owl-bot-staging/v2/src/v2/migration_service_proto_list.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_metrics.proto", - "../../protos/google/cloud/bigquery/migration/v2/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2/translation_config.proto" -] diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js deleted file mode 100644 index ecc7e4b..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
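Per the `migration_service_client_config.json` above, `GetMigrationWorkflow`, `ListMigrationWorkflows`, and `StartMigrationWorkflow` retry on `UNAVAILABLE` with exponential backoff (1 s initial delay, 1.3 multiplier, 10 s cap, 10-minute total timeout), while the remaining RPCs are non-retrying by default and the per-call timeout is 60 000 ms where specified. A minimal sketch of overriding that timeout through the google-gax `CallOptions` second argument, assuming the same client; the workflow resource name is a placeholder:

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function getWorkflowWithShortTimeout() {
  const client = new MigrationServiceClient();
  // Placeholder name; format: projects/{project}/locations/{location}/workflows/{workflow}.
  const name = 'projects/my-project/locations/us/workflows/workflow-1234';
  // The second argument is a CallOptions object; `timeout` (in ms) overrides the
  // 60 000 ms default configured for GetMigrationWorkflow in the client config above.
  const [workflow] = await client.getMigrationWorkflow({name}, {timeout: 10000});
  return workflow;
}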
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/bigquery-migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 80fbe2d..0000000 --- a/owl-bot-staging/v2/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2/system-test/install.ts b/owl-bot-staging/v2/system-test/install.ts deleted file mode 100644 index 557a575..0000000 --- a/owl-bot-staging/v2/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import {packNTest} from 'pack-n-play'; -import {readFileSync} from 'fs'; -import {describe, it} from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts b/owl-bot-staging/v2/test/gapic_migration_service_v2.ts deleted file mode 100644 index 061c58c..0000000 --- a/owl-bot-staging/v2/test/gapic_migration_service_v2.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2.MigrationServiceClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new 
migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - 
client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', 
async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - 
assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - 
const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = 
generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, 
expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - 
resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = 
stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - 
.getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - 
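For orientation while reading the path-template tests above and below: the generated client exposes small helper methods that render and parse these resource names, which is exactly what the stubs here fake out. A minimal usage sketch, assuming the package's top-level MigrationServiceClient export and placeholder IDs:

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

const client = new MigrationServiceClient();

// Render a resource name from its parts (the same template the stubs above replace).
const subtaskName = client.migrationSubtaskPath(
  'my-project', 'us', 'my-workflow', 'my-subtask' // placeholders
);
// -> 'projects/my-project/locations/us/workflows/my-workflow/subtasks/my-subtask'

// Parse the parts back out of a resource name.
console.log(client.matchWorkflowFromMigrationSubtaskName(subtaskName)); // 'my-workflow'
console.log(client.matchProjectFromMigrationSubtaskName(subtaskName));  // 'my-project'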
assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2/tsconfig.json b/owl-bot-staging/v2/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2/webpack.config.js b/owl-bot-staging/v2/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
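The test file removed above exercises three equivalent surfaces for each paginated RPC: a promise that auto-paginates, a readable object stream, and an async iterable. A usage sketch of the ListMigrationWorkflows variants, assuming application default credentials and a placeholder parent; this is a sketch, not one of the generated samples removed elsewhere in this patch:

import {MigrationServiceClient} from '@google-cloud/bigquery-migration';

async function listWorkflowsDemo() {
  const client = new MigrationServiceClient();
  const parent = 'projects/my-project/locations/us'; // placeholder

  // 1. Promise surface: auto-paginates, resolves to [workflows, nextRequest, rawResponse].
  const [workflows] = await client.listMigrationWorkflows({parent});
  console.log('promise surface returned', workflows.length, 'workflows');

  // 2. Stream surface: emits one MigrationWorkflow per 'data' event.
  client.listMigrationWorkflowsStream({parent})
    .on('data', (workflow: {name?: string | null}) => console.log('stream:', workflow.name))
    .on('error', console.error);

  // 3. Async-iterable surface, as exercised by the *Async tests above.
  for await (const workflow of client.listMigrationWorkflowsAsync({parent})) {
    console.log('iterator:', workflow.name);
  }
}

listWorkflowsDemo().catch(console.error);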
- -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/owl-bot-staging/v2alpha/.eslintignore b/owl-bot-staging/v2alpha/.eslintignore deleted file mode 100644 index cfc348e..0000000 --- a/owl-bot-staging/v2alpha/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v2alpha/.eslintrc.json b/owl-bot-staging/v2alpha/.eslintrc.json deleted file mode 100644 index 7821534..0000000 --- a/owl-bot-staging/v2alpha/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v2alpha/.gitignore b/owl-bot-staging/v2alpha/.gitignore deleted file mode 100644 index 5d32b23..0000000 --- a/owl-bot-staging/v2alpha/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v2alpha/.jsdoc.js b/owl-bot-staging/v2alpha/.jsdoc.js deleted file mode 100644 index c3c1e3d..0000000 --- a/owl-bot-staging/v2alpha/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/bigquery-migration', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v2alpha/.mocharc.js b/owl-bot-staging/v2alpha/.mocharc.js deleted file mode 100644 index 481c522..0000000 --- a/owl-bot-staging/v2alpha/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v2alpha/.prettierrc.js b/owl-bot-staging/v2alpha/.prettierrc.js deleted file mode 100644 index 494e147..0000000 --- a/owl-bot-staging/v2alpha/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
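The deleted .mocharc.js above builds its exported config at require time, so environment variables change what mocha sees. A small illustrative check of how those overrides resolve (paths and values are only examples):

// Illustrative only: load the generated mocha config under different env vars.
process.env.MOCHA_REPORTER = 'xunit';
process.env.MOCHA_REPORTER_OUTPUT = 'results.xml';
process.env.MOCHA_THROW_DEPRECATION = 'false';

const mochaConfig = require('./.mocharc.js');
console.log(mochaConfig.reporter);               // 'xunit'
console.log(mochaConfig['reporter-option']);     // 'output=results.xml'
console.log('throw-deprecation' in mochaConfig); // false: the override deletes that key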
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v2alpha/README.md b/owl-bot-staging/v2alpha/README.md deleted file mode 100644 index 4e0341c..0000000 --- a/owl-bot-staging/v2alpha/README.md +++ /dev/null @@ -1 +0,0 @@ -Migration: Nodejs Client diff --git a/owl-bot-staging/v2alpha/linkinator.config.json b/owl-bot-staging/v2alpha/linkinator.config.json deleted file mode 100644 index befd23c..0000000 --- a/owl-bot-staging/v2alpha/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v2alpha/package.json b/owl-bot-staging/v2alpha/package.json deleted file mode 100644 index 24ced44..0000000 --- a/owl-bot-staging/v2alpha/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "@google-cloud/bigquery-migration", - "version": "0.1.0", - "description": "Migration client for Node.js", - "repository": "googleapis/nodejs-migration", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google migration", - "migration", - "migration service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . && cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^3.3.1" - }, - "devDependencies": { - "@types/mocha": "^9.1.1", - "@types/node": "^16.11.56", - "@types/sinon": "^10.0.13", - "c8": "^7.12.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.11", - "jsdoc-fresh": "^2.0.1", - "jsdoc-region-tag": "^2.0.0", - "linkinator": "^4.0.2", - "mocha": "^10.0.0", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^14.0.0", - "ts-loader": "^8.4.0", - "typescript": "^4.8.2", - "webpack": "^4.46.0", - "webpack-cli": "^4.10.0" - }, - "engines": { - "node": ">=v12" - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto deleted file mode 100644 index 0c6ea13..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
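The package.json removed above wires `prepare` to compile the protos and TypeScript into build/, which is what the published entry point (build/src/index.js) serves. A minimal consumption sketch, assuming the published package re-exports both a default MigrationServiceClient and the versioned namespaces, as the staging src/index.ts files suggest:

import {MigrationServiceClient, v2alpha} from '@google-cloud/bigquery-migration';

async function main() {
  // Top-level export (assumed here to be the v2 client).
  const client = new MigrationServiceClient();
  console.log('resolved project:', await client.getProjectId());

  // Or pin the alpha surface explicitly through the versioned namespace.
  const alphaClient = new v2alpha.MigrationServiceClient();
  console.log('v2alpha client constructed:', Boolean(alphaClient));
}

main().catch(console.error);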
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "AssessmentTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Assessment task config. -message AssessmentTaskDetails { - // Required. The Cloud Storage path for assessment input files. - string input_path = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The BigQuery dataset for output. - string output_dataset = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. An optional Cloud Storage path to write the query logs (which is - // then used as an input path on the translation task) - string querylogs_path = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data source or data warehouse type (eg: TERADATA/REDSHIFT) - // from which the input data is extracted. - string data_source = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// Details for an assessment task orchestration result. -message AssessmentOrchestrationResultDetails { - // Optional. The version used for the output table schemas. - string output_tables_schema_version = 1 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto deleted file mode 100644 index 50d4c75..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
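AssessmentTaskDetails above is one of the task payloads a workflow can carry (see MigrationTask in migration_entities.proto below). A hedged sketch of what such a task might look like inside a CreateMigrationWorkflow request to the v2alpha client; field names follow the proto in camelCase, the task type string is illustrative, and every path and ID is a placeholder:

import {v2alpha} from '@google-cloud/bigquery-migration';

async function createAssessmentWorkflow() {
  const client = new v2alpha.MigrationServiceClient();
  const [workflow] = await client.createMigrationWorkflow({
    parent: 'projects/my-project/locations/us', // placeholder
    migrationWorkflow: {
      displayName: 'teradata-assessment',
      tasks: {
        // The map key is just a convenient handle for the task, per the proto comment.
        assessment: {
          type: 'Assessment', // illustrative task type string
          assessmentTaskDetails: {
            inputPath: 'gs://my-bucket/assessment-input/*', // placeholder
            outputDataset: 'my_assessment_output_dataset',  // placeholder
            dataSource: 'TERADATA',
          },
        },
      },
    },
  });
  console.log('created', workflow.name);
}

createAssessmentWorkflow().catch(console.error);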
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/assessment_task.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/cloud/bigquery/migration/v2alpha/translation_task.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationEntitiesProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// A migration workflow which specifies what needs to be done for an EDW -// migration. -message MigrationWorkflow { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}" - }; - - // Possible migration workflow states. - enum State { - // Workflow state is unspecified. - STATE_UNSPECIFIED = 0; - - // Workflow is in draft status, i.e. tasks are not yet eligible for - // execution. - DRAFT = 1; - - // Workflow is running (i.e. tasks are eligible for execution). - RUNNING = 2; - - // Workflow is paused. Tasks currently in progress may continue, but no - // further tasks will be scheduled. - PAUSED = 3; - - // Workflow is complete. There should not be any task in a non-terminal - // state, but if they are (e.g. forced termination), they will not be - // scheduled. - COMPLETED = 4; - } - - // Output only. Immutable. The unique identifier for the migration workflow. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The display name of the workflow. This can be set to give a workflow - // a descriptive name. There is no guarantee or enforcement of uniqueness. - string display_name = 6; - - // The tasks in a workflow in a named map. The name (i.e. key) has no - // meaning and is merely a convenient way to address a specific task - // in a workflow. - map tasks = 2; - - // Output only. That status of the workflow. - State state = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the workflow was created. - google.protobuf.Timestamp create_time = 4; - - // Time when the workflow was last updated. - google.protobuf.Timestamp last_update_time = 5; -} - -// A single task for a migration which has details about the configuration of -// the task. -message MigrationTask { - // Possible states of a migration task. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The task is waiting for orchestration. - PENDING = 1; - - // The task is assigned to an orchestrator. - ORCHESTRATING = 2; - - // The task is running, i.e. its subtasks are ready for execution. - RUNNING = 3; - - // Tha task is paused. Assigned subtasks can continue, but no new subtasks - // will be scheduled. - PAUSED = 4; - - // The task finished successfully. - SUCCEEDED = 5; - - // The task finished unsuccessfully. 
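The MigrationWorkflow message above is what GetMigrationWorkflow returns, and its State enum is what the client surfaces on workflow.state. A small polling sketch; the workflow name is a placeholder, and the double comparison is there only because generated clients may report the enum as its name or its number (RUNNING = 2 in this proto):

import {v2alpha} from '@google-cloud/bigquery-migration';

async function waitUntilNotRunning(name: string) {
  const client = new v2alpha.MigrationServiceClient();
  // Coarse polling loop; a real caller would add backoff and a timeout.
  for (;;) {
    const [workflow] = await client.getMigrationWorkflow({name});
    console.log(name, 'is in state', workflow.state);
    if (workflow.state !== 'RUNNING' && workflow.state !== 2) {
      return workflow;
    }
    await new Promise<void>(resolve => setTimeout(resolve, 30_000));
  }
}

waitUntilNotRunning('projects/my-project/locations/us/workflows/1234') // placeholder
  .catch(console.error);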
- FAILED = 6; - } - - // The details of the task. - oneof task_details { - // Task configuration for Assessment. - AssessmentTaskDetails assessment_task_details = 12; - - // Task configuration for Batch/Offline SQL Translation. - TranslationTaskDetails translation_task_details = 13; - } - - // Output only. Immutable. The unique identifier for the migration task. The ID is server-generated. - string id = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The type of the task. This must be a supported task type. - string type = 2; - - // DEPRECATED! Use one of the task_details below. - // The details of the task. The type URL must be one of the supported task - // details messages and correspond to the Task's type. - google.protobuf.Any details = 3; - - // Output only. The current state of the task. - State state = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Time when the task was created. - google.protobuf.Timestamp create_time = 6; - - // Time when the task was last updated. - google.protobuf.Timestamp last_update_time = 7; - - // Output only. Additional information about the orchestration. - MigrationTaskOrchestrationResult orchestration_result = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A subtask for a migration which carries details about the configuration of -// the subtask. The content of the details should not matter to the end user, -// but is a contract between the subtask creator and subtask worker. -message MigrationSubtask { - option (google.api.resource) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - pattern: "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}" - }; - - // Possible states of a migration subtask. - enum State { - // The state is unspecified. - STATE_UNSPECIFIED = 0; - - // The subtask is ready, i.e. it is ready for execution. - ACTIVE = 1; - - // The subtask is running, i.e. it is assigned to a worker for execution. - RUNNING = 2; - - // The subtask finished successfully. - SUCCEEDED = 3; - - // The subtask finished unsuccessfully. - FAILED = 4; - - // The subtask is paused, i.e., it will not be scheduled. If it was already - // assigned,it might still finish but no new lease renewals will be granted. - PAUSED = 5; - } - - // Output only. Immutable. The resource name for the migration subtask. The ID is - // server-generated. - // - // Example: `projects/123/locations/us/workflows/345/subtasks/678` - string name = 1 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = IMMUTABLE - ]; - - // The unique ID of the task to which this subtask belongs. - string task_id = 2; - - // The type of the Subtask. The migration service does not check whether this - // is a known type. It is up to the task creator (i.e. orchestrator or worker) - // to ensure it only creates subtasks for which there are compatible workers - // polling for Subtasks. - string type = 3; - - // Output only. The current state of the subtask. - State state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. An explanation that may be populated when the task is in FAILED state. - google.rpc.ErrorInfo processing_error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. 
Provides details to errors and issues encountered while processing the - // subtask. Presence of error details does not mean that the subtask failed. - repeated ResourceErrorDetail resource_error_details = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The number or resources with errors. Note: This is not the total - // number of errors as each resource can have more than one error. - // This is used to indicate truncation by having a `resource_error_count` - // that is higher than the size of `resource_error_details`. - int32 resource_error_count = 13; - - // Time when the subtask was created. - google.protobuf.Timestamp create_time = 7; - - // Time when the subtask was last updated. - google.protobuf.Timestamp last_update_time = 8; - - // The metrics for the subtask. - repeated TimeSeries metrics = 11; -} - -// Additional information from the orchestrator when it is done with the -// task orchestration. -message MigrationTaskOrchestrationResult { - // Details specific to the task type. - oneof details { - // Details specific to assessment task types. - AssessmentOrchestrationResultDetails assessment_details = 1; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto deleted file mode 100644 index 89dac5e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/field_behavior.proto"; -import "google/rpc/error_details.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationErrorDetailsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Provides details for errors and the corresponding resources. -message ResourceErrorDetail { - // Required. Information about the resource where the error is located. - google.rpc.ResourceInfo resource_info = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The error details for the resource. - repeated ErrorDetail error_details = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. How many errors there are in total for the resource. Truncation can be - // indicated by having an `error_count` that is higher than the size of - // `error_details`. - int32 error_count = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// Provides details for errors, e.g. issues that where encountered when -// processing a subtask. -message ErrorDetail { - // Optional. 
The exact location within the resource (if applicable). - ErrorLocation location = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Describes the cause of the error with structured detail. - google.rpc.ErrorInfo error_info = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Holds information about where the error is located. -message ErrorLocation { - // Optional. If applicable, denotes the line where the error occurred. A zero value - // means that there is no line information. - int32 line = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If applicable, denotes the column where the error occurred. A zero value - // means that there is no columns information. - int32 column = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto deleted file mode 100644 index ce60dd2..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/distribution.proto"; -import "google/api/field_behavior.proto"; -import "google/api/metric.proto"; -import "google/protobuf/timestamp.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationMetricsProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// The metrics object for a SubTask. -message TimeSeries { - // Required. The name of the metric. - // - // If the metric is not known by the service yet, it will be auto-created. - string metric = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The value type of the time series. - google.api.MetricDescriptor.ValueType value_type = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric kind of the time series. - // - // If present, it must be the same as the metric kind of the associated - // metric. If the associated metric's descriptor must be auto-created, then - // this field specifies the metric kind of the new descriptor and must be - // either `GAUGE` (the default) or `CUMULATIVE`. - google.api.MetricDescriptor.MetricKind metric_kind = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The data points of this time series. When listing time series, points are - // returned in reverse time order. - // - // When creating a time series, this field must contain exactly one point and - // the point's type must be the same as the value type of the associated - // metric. 
If the associated metric's descriptor must be auto-created, then - // the value type of the descriptor is determined by the point's type, which - // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. - repeated Point points = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// A single data point in a time series. -message Point { - // The time interval to which the data point applies. For `GAUGE` metrics, - // the start time does not need to be supplied, but if it is supplied, it must - // equal the end time. For `DELTA` metrics, the start and end time should - // specify a non-zero interval, with subsequent points specifying contiguous - // and non-overlapping intervals. For `CUMULATIVE` metrics, the start and end - // time should specify a non-zero interval, with subsequent points specifying - // the same start time and increasing end times, until an event resets the - // cumulative value to zero and sets a new start time for the following - // points. - TimeInterval interval = 1; - - // The value of the data point. - TypedValue value = 2; -} - -// A time interval extending just after a start time through an end time. -// If the start time is the same as the end time, then the interval -// represents a single point in time. -message TimeInterval { - // Optional. The beginning of the time interval. The default value - // for the start time is the end time. The start time must not be - // later than the end time. - google.protobuf.Timestamp start_time = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The end of the time interval. - google.protobuf.Timestamp end_time = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A single strongly-typed value. -message TypedValue { - // The typed value field. - oneof value { - // A Boolean value: `true` or `false`. - bool bool_value = 1; - - // A 64-bit integer. Its range is approximately +/-9.2x10^18. - int64 int64_value = 2; - - // A 64-bit double-precision floating-point number. Its magnitude - // is approximately +/-10^(+/-300) and it has 16 significant digits of - // precision. - double double_value = 3; - - // A variable-length string value. - string string_value = 4; - - // A distribution value. - google.api.Distribution distribution_value = 5; - } -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto deleted file mode 100644 index 9a184a1..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto +++ /dev/null @@ -1,247 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
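migration_metrics.proto above (TimeSeries, Point, TimeInterval, TypedValue) describes the metrics attached to each MigrationSubtask. A sketch of reading them off a fetched subtask; TypedValue is a oneof, so exactly one *_value field is populated per point, and the simplistic fallback chain below would skip a boolValue of false:

import {v2alpha} from '@google-cloud/bigquery-migration';

async function printSubtaskMetrics(name: string) {
  const client = new v2alpha.MigrationServiceClient();
  const [subtask] = await client.getMigrationSubtask({name});
  for (const series of subtask.metrics ?? []) {
    for (const point of series.points ?? []) {
      const value =
        point.value?.int64Value ??
        point.value?.doubleValue ??
        point.value?.boolValue ??
        point.value?.stringValue;
      console.log(series.metric, point.interval?.endTime, value);
    }
  }
}

printSubtaskMetrics(
  'projects/my-project/locations/us/workflows/1234/subtasks/5678' // placeholder
).catch(console.error);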
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_entities.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_error_details.proto"; -import "google/cloud/bigquery/migration/v2alpha/migration_metrics.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "MigrationServiceProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Service to handle EDW migrations. -service MigrationService { - option (google.api.default_host) = "bigquerymigration.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a migration workflow. - rpc CreateMigrationWorkflow(CreateMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - post: "/v2alpha/{parent=projects/*/locations/*}/workflows" - body: "migration_workflow" - }; - option (google.api.method_signature) = "parent,migration_workflow"; - } - - // Gets a previously created migration workflow. - rpc GetMigrationWorkflow(GetMigrationWorkflowRequest) returns (MigrationWorkflow) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration workflow. - rpc ListMigrationWorkflows(ListMigrationWorkflowsRequest) returns (ListMigrationWorkflowsResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*}/workflows" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a migration workflow by name. - rpc DeleteMigrationWorkflow(DeleteMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2alpha/{name=projects/*/locations/*/workflows/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Starts a previously created migration workflow. I.e., the state transitions - // from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - // An error will be signaled if the state is anything other than DRAFT or - // RUNNING. - rpc StartMigrationWorkflow(StartMigrationWorkflowRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2alpha/{name=projects/*/locations/*/workflows/*}:start" - body: "*" - }; - option (google.api.method_signature) = "name"; - } - - // Gets a previously created migration subtask. - rpc GetMigrationSubtask(GetMigrationSubtaskRequest) returns (MigrationSubtask) { - option (google.api.http) = { - get: "/v2alpha/{name=projects/*/locations/*/workflows/*/subtasks/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists previously created migration subtasks. 
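The service definition above spells out the workflow lifecycle: CreateMigrationWorkflow returns a new workflow (typically still in DRAFT), StartMigrationWorkflow moves it to RUNNING (a no-op if it is already RUNNING, an error in any other state), and GetMigrationWorkflow reads it back. A hedged end-to-end sketch against the v2alpha client; the empty task map is a placeholder, not a working assessment or translation config:

import {v2alpha} from '@google-cloud/bigquery-migration';

async function runWorkflowLifecycle() {
  const client = new v2alpha.MigrationServiceClient();
  const parent = 'projects/my-project/locations/us'; // placeholder

  // Create: the workflow is returned with a server-generated name.
  const [created] = await client.createMigrationWorkflow({
    parent,
    migrationWorkflow: {displayName: 'example-workflow', tasks: {}},
  });

  // Start: DRAFT -> RUNNING, per the StartMigrationWorkflow comment above.
  await client.startMigrationWorkflow({name: created.name});

  // Read it back.
  const [fetched] = await client.getMigrationWorkflow({name: created.name});
  console.log(fetched.name, fetched.state);

  // Clean up.
  await client.deleteMigrationWorkflow({name: created.name});
}

runWorkflowLifecycle().catch(console.error);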
- rpc ListMigrationSubtasks(ListMigrationSubtasksRequest) returns (ListMigrationSubtasksResponse) { - option (google.api.http) = { - get: "/v2alpha/{parent=projects/*/locations/*/workflows/*}/subtasks" - }; - option (google.api.method_signature) = "parent"; - } -} - -// Request to create a migration workflow resource. -message CreateMigrationWorkflowRequest { - // Required. The name of the project to which this migration workflow belongs. - // Example: `projects/foo/locations/bar` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // Required. The migration workflow to create. - MigrationWorkflow migration_workflow = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to get a previously created migration workflow. -message GetMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; -} - -// A request to list previously created migration workflows. -message ListMigrationWorkflowsRequest { - // Required. The project and location of the migration workflows to list. - // Example: `projects/123/locations/us` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "locations.googleapis.com/Location" - } - ]; - - // The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2; - - // The maximum number of migration workflows to return. The service may return - // fewer than this number. - int32 page_size = 3; - - // A page token, received from previous `ListMigrationWorkflows` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationWorkflows` - // must match the call that provided the page token. - string page_token = 4; -} - -// Response object for a `ListMigrationWorkflows` call. -message ListMigrationWorkflowsResponse { - // The migration workflows for the specified project / location. - repeated MigrationWorkflow migration_workflows = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a previously created migration workflow. -message DeleteMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to start a previously created migration workflow. -message StartMigrationWorkflowRequest { - // Required. The unique identifier for the migration workflow. - // Example: `projects/123/locations/us/workflows/1234` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; -} - -// A request to get a previously created migration subtasks. -message GetMigrationSubtaskRequest { - // Required. The unique identifier for the migration subtask. 
- // Example: `projects/123/locations/us/workflows/1234/subtasks/543` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationSubtask" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to list previously created migration subtasks. -message ListMigrationSubtasksRequest { - // Required. The migration task of the subtasks to list. - // Example: `projects/123/locations/us/workflows/1234` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "bigquerymigration.googleapis.com/MigrationWorkflow" - } - ]; - - // Optional. The list of fields to be retrieved. - google.protobuf.FieldMask read_mask = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of migration tasks to return. The service may return - // fewer than this number. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token, received from previous `ListMigrationSubtasks` call. - // Provide this to retrieve the subsequent page. - // - // When paginating, all other parameters provided to `ListMigrationSubtasks` - // must match the call that provided the page token. - string page_token = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The filter to apply. This can be used to get the subtasks of a specific - // tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - // task ID (not the name in the named map). - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response object for a `ListMigrationSubtasks` call. -message ListMigrationSubtasksResponse { - // The migration subtasks for the specified task. - repeated MigrationSubtask migration_subtasks = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} diff --git a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto b/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto deleted file mode 100644 index bf4b27e..0000000 --- a/owl-bot-staging/v2alpha/protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.bigquery.migration.v2alpha; - -option csharp_namespace = "Google.Cloud.BigQuery.Migration.V2Alpha"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/migration/v2alpha;migration"; -option java_multiple_files = true; -option java_outer_classname = "TranslationTaskProto"; -option java_package = "com.google.cloud.bigquery.migration.v2alpha"; -option php_namespace = "Google\\Cloud\\BigQuery\\Migration\\V2alpha"; - -// Mapping between an input and output file to be translated in a subtask. -message TranslationFileMapping { - // The Cloud Storage path of a file to be translated in a subtask. - string input_path = 1; - - // The Cloud Storage path to write back the corresponding input file to. - string output_path = 2; -} - -// The translation task config to capture necessary settings for a translation -// task and subtask. -message TranslationTaskDetails { - // The file encoding types. - enum FileEncoding { - // File encoding setting is not specified. - FILE_ENCODING_UNSPECIFIED = 0; - - // File encoding is UTF_8. - UTF_8 = 1; - - // File encoding is ISO_8859_1. - ISO_8859_1 = 2; - - // File encoding is US_ASCII. - US_ASCII = 3; - - // File encoding is UTF_16. - UTF_16 = 4; - - // File encoding is UTF_16LE. - UTF_16LE = 5; - - // File encoding is UTF_16BE. - UTF_16BE = 6; - } - - // The special token data type. - enum TokenType { - // Token type is not specified. - TOKEN_TYPE_UNSPECIFIED = 0; - - // Token type as string. - STRING = 1; - - // Token type as integer. - INT64 = 2; - - // Token type as numeric. - NUMERIC = 3; - - // Token type as boolean. - BOOL = 4; - - // Token type as float. - FLOAT64 = 5; - - // Token type as date. - DATE = 6; - - // Token type as timestamp. - TIMESTAMP = 7; - } - - // The language specific settings for the translation task. - oneof language_options { - // The Teradata SQL specific settings for the translation task. - TeradataOptions teradata_options = 10; - - // The BTEQ specific settings for the translation task. - BteqOptions bteq_options = 11; - } - - // The Cloud Storage path for translation input files. - string input_path = 1; - - // The Cloud Storage path for translation output files. - string output_path = 2; - - // Cloud Storage files to be processed for translation. - repeated TranslationFileMapping file_paths = 12; - - // The Cloud Storage path to DDL files as table schema to assist semantic - // translation. - string schema_path = 3; - - // The file encoding type. - FileEncoding file_encoding = 4; - - // The settings for SQL identifiers. - IdentifierSettings identifier_settings = 5; - - // The map capturing special tokens to be replaced during translation. The key - // is the special token in string form. The value is the token data type. This is - // used to translate SQL query templates that contain special tokens as - // placeholders. A special token makes a query invalid to parse; this map - // annotates those special tokens with types so the parser understands how to - // parse them into a proper structure with type information. - map<string, TokenType> special_token_map = 6; - - // The filter applied to translation details. - Filter filter = 7; - - // Specifies the exact name of the BigQuery table ("dataset.table") to be used - // for surfacing raw translation errors. If the table does not exist, we will - // create it. If it already exists and the schema is the same, we will re-use it. - // If the table exists and the schema is different, we will throw an error.
- string translation_exception_table = 13; -} - -// The filter applied to fields of translation details. -message Filter { - // The list of prefixes used to exclude processing for input files. - repeated string input_file_exclusion_prefixes = 1; -} - -// Settings related to SQL identifiers. -message IdentifierSettings { - // The identifier case type. - enum IdentifierCase { - // The identifier case is not specified. - IDENTIFIER_CASE_UNSPECIFIED = 0; - - // Identifiers' case will be kept as the original case. - ORIGINAL = 1; - - // Identifiers will be in upper case. - UPPER = 2; - - // Identifiers will be in lower case. - LOWER = 3; - } - - // The SQL identifier rewrite mode. - enum IdentifierRewriteMode { - // SQL Identifier rewrite mode is unspecified. - IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0; - - // SQL identifiers won't be rewritten. - NONE = 1; - - // All SQL identifiers will be rewritten. - REWRITE_ALL = 2; - } - - // The setting to control output queries' identifier case. - IdentifierCase output_identifier_case = 1; - - // Specifies the rewrite mode for SQL identifiers. - IdentifierRewriteMode identifier_rewrite_mode = 2; -} - -// Teradata SQL specific translation task related settings. -message TeradataOptions { - -} - -// BTEQ translation task related settings. -message BteqOptions { - // Specifies the project and dataset in BigQuery that will be used for - // external table creation during the translation. - DatasetReference project_dataset = 1; - - // The Cloud Storage location to be used as the default path for files that - // are not otherwise specified in the file replacement map. - string default_path_uri = 2; - - // Maps the local paths that are used in BTEQ scripts (the keys) to the paths - // in Cloud Storage that should be used in their stead in the translation (the - // value). - map<string, string> file_replacement_map = 3; -} - -// Reference to a BigQuery dataset. -message DatasetReference { - // A unique ID for this dataset, without the project name. The ID - // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). - // The maximum length is 1,024 characters. - string dataset_id = 1; - - // The ID of the project containing this dataset. - string project_id = 2; -} diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js deleted file mode 100644 index d8613fe..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.create_migration_workflow.js +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten.
** - - - -'use strict'; - -function main(parent, migrationWorkflow) { - // [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - */ - // const parent = 'abc123' - /** - * Required. The migration workflow to create. - */ - // const migrationWorkflow = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callCreateMigrationWorkflow() { - // Construct request - const request = { - parent, - migrationWorkflow, - }; - - // Run request - const response = await migrationClient.createMigrationWorkflow(request); - console.log(response); - } - - callCreateMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js deleted file mode 100644 index e17c7ee..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.delete_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callDeleteMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.deleteMigrationWorkflow(request); - console.log(response); - } - - callDeleteMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js deleted file mode 100644 index a17add1..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_subtask.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - */ - // const name = 'abc123' - /** - * Optional. The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationSubtask() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationSubtask(request); - console.log(response); - } - - callGetMigrationSubtask(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js deleted file mode 100644 index 1519196..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.get_migration_workflow.js +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - /** - * The list of fields to be retrieved. 
- */ - // const readMask = {} - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callGetMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.getMigrationWorkflow(request); - console.log(response); - } - - callGetMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js deleted file mode 100644 index 99419dd..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_subtasks.js +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - */ - // const parent = 'abc123' - /** - * Optional. The list of fields to be retrieved. - */ - // const readMask = {} - /** - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - */ - // const pageToken = 'abc123' - /** - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). 
- */ - // const filter = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationSubtasks() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationSubtasksAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationSubtasks(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js deleted file mode 100644 index 31c0263..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.list_migration_workflows.js +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - */ - // const parent = 'abc123' - /** - * The list of fields to be retrieved. - */ - // const readMask = {} - /** - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - */ - // const pageSize = 1234 - /** - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callListMigrationWorkflows() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await migrationClient.listMigrationWorkflowsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListMigrationWorkflows(); - // [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js b/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js deleted file mode 100644 index 85288c6..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/migration_service.start_migration_workflow.js +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] - /** - * This snippet has been automatically generated and should be regarded as a code template only. - * It will require modifications to work. - * It may require correct/in-range values for request initialization. - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The unique identifier for the migration workflow. 
- * Example: `projects/123/locations/us/workflows/1234` - */ - // const name = 'abc123' - - // Imports the Migration library - const {MigrationServiceClient} = require('@google-cloud/bigquery-migration').v2alpha; - - // Instantiates a client - const migrationClient = new MigrationServiceClient(); - - async function callStartMigrationWorkflow() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await migrationClient.startMigrationWorkflow(request); - console.log(response); - } - - callStartMigrationWorkflow(); - // [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json deleted file mode 100644 index c5336c2..0000000 --- a/owl-bot-staging/v2alpha/samples/generated/v2alpha/snippet_metadata.google.cloud.bigquery.migration.v2alpha.json +++ /dev/null @@ -1,335 +0,0 @@ -{ - "clientLibrary": { - "name": "nodejs-migration", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2alpha", - "version": "v2alpha" - } - ] - }, - "snippets": [ - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", - "title": "MigrationService createMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Creates a migration workflow.", - "canonical": true, - "file": "migration_service.create_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 59, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "migration_workflow", - "type": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "CreateMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", - "title": "MigrationService getMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration workflow.", - "canonical": true, - "file": "migration_service.get_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": 
".google.cloud.bigquery.migration.v2alpha.MigrationWorkflow", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", - "title": "MigrationService listMigrationWorkflows Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration workflow.", - "canonical": true, - "file": "migration_service.list_migration_workflows.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 72, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationWorkflows", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", - "title": "MigrationService deleteMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Deletes a migration workflow by name.", - "canonical": true, - "file": "migration_service.delete_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "DeleteMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", - "title": "MigrationService startMigrationWorkflow Sample", - "origin": "API_DEFINITION", - "description": " Starts a previously created migration workflow. I.e., the state transitions from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. 
An error will be signaled if the state is anything other than DRAFT or RUNNING.", - "canonical": true, - "file": "migration_service.start_migration_workflow.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 54, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "StartMigrationWorkflow", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", - "title": "MigrationService getMigrationSubtask Sample", - "origin": "API_DEFINITION", - "description": " Gets a previously created migration subtask.", - "canonical": true, - "file": "migration_service.get_migration_subtask.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 58, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.MigrationSubtask", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "GetMigrationSubtask", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "service": { - "shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - }, - { - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", - "title": "MigrationService listMigrationSubtasks Sample", - "origin": "API_DEFINITION", - "description": " Lists previously created migration subtasks.", - "canonical": true, - "file": "migration_service.list_migration_subtasks.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 78, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "read_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksResponse", - "client": { - "shortName": "MigrationServiceClient", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationServiceClient" - }, - "method": { - "shortName": "ListMigrationSubtasks", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "service": { - 
"shortName": "MigrationService", - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v2alpha/src/index.ts b/owl-bot-staging/v2alpha/src/index.ts deleted file mode 100644 index 288e629..0000000 --- a/owl-bot-staging/v2alpha/src/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as v2alpha from './v2alpha'; -const MigrationServiceClient = v2alpha.MigrationServiceClient; -type MigrationServiceClient = v2alpha.MigrationServiceClient; -export {v2alpha, MigrationServiceClient}; -export default {v2alpha, MigrationServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json b/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json deleted file mode 100644 index adf8d06..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/gapic_metadata.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.bigquery.migration.v2alpha", - "libraryPackage": "@google-cloud/bigquery-migration", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - "listMigrationSubtasksAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "createMigrationWorkflow" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "getMigrationWorkflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "deleteMigrationWorkflow" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "startMigrationWorkflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "getMigrationSubtask" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "listMigrationWorkflows", - "listMigrationWorkflowsStream", - "listMigrationWorkflowsAsync" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "listMigrationSubtasks", - "listMigrationSubtasksStream", - 
"listMigrationSubtasksAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/index.ts b/owl-bot-staging/v2alpha/src/v2alpha/index.ts deleted file mode 100644 index f75d208..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {MigrationServiceClient} from './migration_service_client'; diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts deleted file mode 100644 index d5105f9..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import type * as gax from 'google-gax'; -import type {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; -import {Transform} from 'stream'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v2alpha/migration_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './migration_service_client_config.json'; -const version = require('../../../package.json').version; - -/** - * Service to handle EDW migrations. 
- * @class - * @memberof v2alpha - */ -export class MigrationServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - migrationServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of MigrationServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean | "rest"} [options.fallback] - Use HTTP fallback mode. - * Pass "rest" to use HTTP/1.1 REST API instead of gRPC. - * For more information, please check the - * {@link https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#http11-rest-api-mode documentation}. - * @param {gax} [gaxInstance]: loaded instance of `google-gax`. Useful if you - * need to avoid loading the default gRPC version and want to use the fallback - * HTTP implementation. Load only fallback version and pass it to the constructor: - * ``` - * const gax = require('google-gax/build/src/fallback'); // avoids loading google-gax with gRPC - * const client = new MigrationServiceClient({fallback: 'rest'}, gax); - * ``` - */ - constructor(opts?: ClientOptions, gaxInstance?: typeof gax | typeof gax.fallback) { - // Ensure that options include all the required fields. 
- const staticMembers = this.constructor as typeof MigrationServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Load google-gax module synchronously if needed - if (!gaxInstance) { - gaxInstance = require('google-gax') as typeof gax; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gaxInstance.fallback : gaxInstance; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - migrationSubtaskPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}' - ), - migrationWorkflowPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflows/{workflow}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. 
- this.descriptors.page = { - listMigrationWorkflows: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationWorkflows'), - listMigrationSubtasks: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'migrationSubtasks') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.migration.v2alpha.MigrationService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = this._gaxModule.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.migrationServiceStub) { - return this.migrationServiceStub; - } - - // Put together the "service stub" for - // google.cloud.bigquery.migration.v2alpha.MigrationService. - this.migrationServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.migration.v2alpha.MigrationService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.migration.v2alpha.MigrationService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const migrationServiceStubMethods = - ['createMigrationWorkflow', 'getMigrationWorkflow', 'listMigrationWorkflows', 'deleteMigrationWorkflow', 'startMigrationWorkflow', 'getMigrationSubtask', 'listMigrationSubtasks']; - for (const methodName of migrationServiceStubMethods) { - const callPromise = this.migrationServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor, - this._opts.fallback - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.migrationServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. 
- */ - static get apiEndpoint() { - return 'bigquerymigration.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise<string>; - getProjectId(callback: Callback<string, undefined, undefined>): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback<string, undefined, undefined>): - Promise<string>|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates a migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The name of the project to which this migration workflow belongs. - * Example: `projects/foo/locations/bar` - * @param {google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} request.migrationWorkflow - * Required. The migration workflow to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples.
- * @example include:samples/generated/v2alpha/migration_service.create_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async - */ - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - createMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.ICreateMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v2alpha/migration_service.get_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async - */ - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - getMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationWorkflow(request, options, callback); - } -/** - * Deletes a migration workflow by name. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
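A hedged sketch of fetching a single workflow with the optional read mask described above. The resource name is the documented example value; the mask paths are assumed snake_case proto field names.

import {v2alpha} from '@google-cloud/bigquery-migration';

async function getWorkflowSketch(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  const [workflow] = await client.getMigrationWorkflow({
    name: 'projects/123/locations/us/workflows/1234',
    // Only return the listed fields (google.protobuf.FieldMask).
    readMask: {paths: ['display_name', 'state']},
  });
  console.log(workflow.displayName, workflow.state);
}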
- * @example include:samples/generated/v2alpha/migration_service.delete_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async - */ - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - deleteMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IDeleteMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteMigrationWorkflow(request, options, callback); - } -/** - * Starts a previously created migration workflow. I.e., the state transitions - * from DRAFT to RUNNING. This is a no-op if the state is already RUNNING. - * An error will be signaled if the state is anything other than DRAFT or - * RUNNING. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration workflow. - * Example: `projects/123/locations/us/workflows/1234` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
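A sketch of the delete call above with a per-call gax timeout override; `timeout` is a standard CallOptions field, and the default timeouts and retries otherwise come from the `migration_service_client_config.json` shown later in this patch. The resource name is a placeholder.

import {v2alpha} from '@google-cloud/bigquery-migration';

async function deleteWorkflowSketch(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  // Resolves to [google.protobuf.Empty, request, {}] once the workflow is gone.
  await client.deleteMigrationWorkflow(
    {name: 'projects/123/locations/us/workflows/1234'},
    {timeout: 120000} // per-call override of the configured 60s default
  );
}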
- * @example include:samples/generated/v2alpha/migration_service.start_migration_workflow.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async - */ - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): void; - startMigrationWorkflow( - request?: protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.migration.v2alpha.IStartMigrationWorkflowRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.startMigrationWorkflow(request, options, callback); - } -/** - * Gets a previously created migration subtask. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The unique identifier for the migration subtask. - * Example: `projects/123/locations/us/workflows/1234/subtasks/543` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
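A sketch of starting a workflow, handling the state precondition mentioned above with a try/catch; the name is the documented example value.

import {v2alpha} from '@google-cloud/bigquery-migration';

async function startWorkflowSketch(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  try {
    // No-op if already RUNNING; rejects if the state is neither DRAFT nor RUNNING.
    await client.startMigrationWorkflow({
      name: 'projects/123/locations/us/workflows/1234',
    });
  } catch (err) {
    console.error('Workflow could not be started:', err);
  }
}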
- * @example include:samples/generated/v2alpha/migration_service.get_migration_subtask.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async - */ - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - callback: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): void; - getMigrationSubtask( - request?: protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask, - protos.google.cloud.bigquery.migration.v2alpha.IGetMigrationSubtaskRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getMigrationSubtask(request, options, callback); - } - - /** - * Lists previously created migration workflow. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): void; - listMigrationWorkflows( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationWorkflows(request, options, callback); - } - -/** - * Equivalent to 
`method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationWorkflowsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationWorkflowsStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.createStream( - this.innerApiCalls.listMigrationWorkflows as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationWorkflows`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The project and location of the migration workflows to list. - * Example: `projects/123/locations/us` - * @param {google.protobuf.FieldMask} request.readMask - * The list of fields to be retrieved. - * @param {number} request.pageSize - * The maximum number of migration workflows to return. The service may return - * fewer than this number. - * @param {string} request.pageToken - * A page token, received from previous `ListMigrationWorkflows` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationWorkflows` - * must match the call that provided the page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
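A sketch of the stream variant above: results arrive on 'data' events while pagination happens behind the scenes. The element type annotation is deliberately loose to keep the sketch self-contained, and the parent value is a placeholder.

import {v2alpha} from '@google-cloud/bigquery-migration';

function streamWorkflowsSketch(): void {
  const client = new v2alpha.MigrationServiceClient();
  client
    .listMigrationWorkflowsStream({parent: 'projects/123/locations/us'})
    .on('data', (workflow: {name?: string | null}) => console.log(workflow.name))
    .on('error', console.error)
    .on('end', () => console.log('all pages consumed'));
}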
- * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationWorkflow]{@link google.cloud.bigquery.migration.v2alpha.MigrationWorkflow}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_workflows.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async - */ - listMigrationWorkflowsAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationWorkflowsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationWorkflows']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationWorkflows.asyncIterate( - this.innerApiCalls['listMigrationWorkflows'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - /** - * Lists previously created migration subtasks. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. 
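The `for`-`await`-`of` form recommended above, as a sketch: pages are fetched lazily and the loop can stop early without pulling further pages. The early-exit threshold is arbitrary.

import {v2alpha} from '@google-cloud/bigquery-migration';

async function iterateWorkflowsSketch(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  let seen = 0;
  for await (const workflow of client.listMigrationWorkflowsAsync({
    parent: 'projects/123/locations/us',
    pageSize: 50, // upper bound per page; the service may return fewer
  })) {
    console.log(workflow.name);
    if (++seen >= 10) break; // stop early; no further pages are requested
  }
}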
- * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): void; - listMigrationSubtasks( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>, - callback?: PaginationCallback< - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse|null|undefined, - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask>): - Promise<[ - protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[], - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest|null, - protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listMigrationSubtasks(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. 
- * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listMigrationSubtasksAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listMigrationSubtasksStream( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.createStream( - this.innerApiCalls.listMigrationSubtasks as GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listMigrationSubtasks`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The migration task of the subtasks to list. - * Example: `projects/123/locations/us/workflows/1234` - * @param {google.protobuf.FieldMask} [request.readMask] - * Optional. The list of fields to be retrieved. - * @param {number} [request.pageSize] - * Optional. The maximum number of migration tasks to return. The service may return - * fewer than this number. - * @param {string} [request.pageToken] - * Optional. A page token, received from previous `ListMigrationSubtasks` call. - * Provide this to retrieve the subsequent page. - * - * When paginating, all other parameters provided to `ListMigrationSubtasks` - * must match the call that provided the page token. - * @param {string} [request.filter] - * Optional. The filter to apply. This can be used to get the subtasks of a specific - * tasks in a workflow, e.g. `migration_task = "ab012"` where `"ab012"` is the - * task ID (not the name in the named map). - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
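A sketch of iterating subtasks with the documented filter form, where `migration_task = "ab012"` selects the subtasks of one task by its task ID. The parent and mask paths are placeholders.

import {v2alpha} from '@google-cloud/bigquery-migration';

async function iterateSubtasksSketch(): Promise<void> {
  const client = new v2alpha.MigrationServiceClient();
  for await (const subtask of client.listMigrationSubtasksAsync({
    parent: 'projects/123/locations/us/workflows/1234',
    filter: 'migration_task = "ab012"', // subtasks belonging to task "ab012"
    readMask: {paths: ['name', 'state']}, // optional field mask
  })) {
    console.log(subtask.name, subtask.state);
  }
}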
- * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [MigrationSubtask]{@link google.cloud.bigquery.migration.v2alpha.MigrationSubtask}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v2alpha/migration_service.list_migration_subtasks.js - * region_tag:bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async - */ - listMigrationSubtasksAsync( - request?: protos.google.cloud.bigquery.migration.v2alpha.IListMigrationSubtasksRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = this._gaxModule.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listMigrationSubtasks']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listMigrationSubtasks.asyncIterate( - this.innerApiCalls['listMigrationSubtasks'] as GaxCall, - request as {}, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified migrationSubtask resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @param {string} subtask - * @returns {string} Resource name string. - */ - migrationSubtaskPath(project:string,location:string,workflow:string,subtask:string) { - return this.pathTemplates.migrationSubtaskPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - subtask: subtask, - }); - } - - /** - * Parse the project from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).project; - } - - /** - * Parse the location from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).location; - } - - /** - * Parse the workflow from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).workflow; - } - - /** - * Parse the subtask from MigrationSubtask resource. - * - * @param {string} migrationSubtaskName - * A fully-qualified path representing MigrationSubtask resource. - * @returns {string} A string representing the subtask. - */ - matchSubtaskFromMigrationSubtaskName(migrationSubtaskName: string) { - return this.pathTemplates.migrationSubtaskPathTemplate.match(migrationSubtaskName).subtask; - } - - /** - * Return a fully-qualified migrationWorkflow resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow - * @returns {string} Resource name string. - */ - migrationWorkflowPath(project:string,location:string,workflow:string) { - return this.pathTemplates.migrationWorkflowPathTemplate.render({ - project: project, - location: location, - workflow: workflow, - }); - } - - /** - * Parse the project from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the project. - */ - matchProjectFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).project; - } - - /** - * Parse the location from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the location. - */ - matchLocationFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).location; - } - - /** - * Parse the workflow from MigrationWorkflow resource. - * - * @param {string} migrationWorkflowName - * A fully-qualified path representing MigrationWorkflow resource. - * @returns {string} A string representing the workflow. - */ - matchWorkflowFromMigrationWorkflowName(migrationWorkflowName: string) { - return this.pathTemplates.migrationWorkflowPathTemplate.match(migrationWorkflowName).workflow; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
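A sketch of the path-template helpers defined above: build a fully-qualified resource name from its parts, then parse the parts back out of a name returned by the API. The values are placeholders.

import {v2alpha} from '@google-cloud/bigquery-migration';

function pathHelpersSketch(): void {
  const client = new v2alpha.MigrationServiceClient();
  // Renders `projects/my-project/locations/us/workflows/1234`.
  const workflowName = client.migrationWorkflowPath('my-project', 'us', '1234');
  // Round-trip the components back out of the resource name.
  const project = client.matchProjectFromMigrationWorkflowName(workflowName);
  const workflow = client.matchWorkflowFromMigrationWorkflowName(workflowName);
  console.log(workflowName, project, workflow);
}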
- */ - close(): Promise { - if (this.migrationServiceStub && !this._terminated) { - return this.migrationServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json deleted file mode 100644 index 2184b83..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_client_config.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "interfaces": { - "google.cloud.bigquery.migration.v2alpha.MigrationService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "ce5b960a6ed052e690863808e4f0deff3dc7d49f": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 10000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationWorkflows": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "DeleteMigrationWorkflow": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartMigrationWorkflow": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "GetMigrationSubtask": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - }, - "ListMigrationSubtasks": { - "timeout_millis": 120000, - "retry_codes_name": "unavailable", - "retry_params_name": "ce5b960a6ed052e690863808e4f0deff3dc7d49f" - } - } - } - } -} diff --git a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json b/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json deleted file mode 100644 index 8e91e42..0000000 --- a/owl-bot-staging/v2alpha/src/v2alpha/migration_service_proto_list.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - "../../protos/google/cloud/bigquery/migration/v2alpha/assessment_task.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_entities.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_error_details.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_metrics.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/migration_service.proto", - "../../protos/google/cloud/bigquery/migration/v2alpha/translation_task.proto" -] diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js deleted file mode 100644 index ecc7e4b..0000000 --- 
a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const migration = require('@google-cloud/bigquery-migration'); - -function main() { - const migrationServiceClient = new migration.MigrationServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 80fbe2d..0000000 --- a/owl-bot-staging/v2alpha/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {MigrationServiceClient} from '@google-cloud/bigquery-migration'; - -// check that the client class type name can be used -function doStuffWithMigrationServiceClient(client: MigrationServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const migrationServiceClient = new MigrationServiceClient(); - doStuffWithMigrationServiceClient(migrationServiceClient); -} - -main(); diff --git a/owl-bot-staging/v2alpha/system-test/install.ts b/owl-bot-staging/v2alpha/system-test/install.ts deleted file mode 100644 index 557a575..0000000 --- a/owl-bot-staging/v2alpha/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import {packNTest} from 'pack-n-play'; -import {readFileSync} from 'fs'; -import {describe, it} from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts b/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts deleted file mode 100644 index 99aac57..0000000 --- a/owl-bot-staging/v2alpha/test/gapic_migration_service_v2alpha.ts +++ /dev/null @@ -1,1256 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import {describe, it} from 'mocha'; -import * as migrationserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v2alpha.MigrationServiceClient', () => { - describe('Common methods', () => { - it('has servicePath', () => { - const servicePath = migrationserviceModule.v2alpha.MigrationServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = migrationserviceModule.v2alpha.MigrationServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = migrationserviceModule.v2alpha.MigrationServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - await client.initialize(); - assert(client.migrationServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.migrationServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.migrationServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - 
const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - }); - - describe('createMigrationWorkflow', () => { - it('invokes createMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.createMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - 
}; - const expectedError = new Error('expected'); - client.innerApiCalls.createMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.createMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.CreateMigrationWorkflowRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationWorkflow', () => { - it('invokes getMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 
'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.getMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationWorkflow(request), expectedError); - }); - }); - - describe('deleteMigrationWorkflow', () => { - it('invokes deleteMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.deleteMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteMigrationWorkflow( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - 
.getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.deleteMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.DeleteMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteMigrationWorkflow(request), expectedError); - }); - }); - - describe('startMigrationWorkflow', () => { - it('invokes startMigrationWorkflow without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(expectedResponse); - const [response] = await client.startMigrationWorkflow(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startMigrationWorkflow( - request, - (err?: 
Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startMigrationWorkflow with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startMigrationWorkflow = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - assert((client.innerApiCalls.startMigrationWorkflow as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startMigrationWorkflow with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.StartMigrationWorkflowRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.startMigrationWorkflow(request), expectedError); - }); - }); - - describe('getMigrationSubtask', () => { - it('invokes getMigrationSubtask without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(expectedResponse); - const [response] = await client.getMigrationSubtask(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, 
- }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()); - client.innerApiCalls.getMigrationSubtask = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getMigrationSubtask( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getMigrationSubtask with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getMigrationSubtask = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - assert((client.innerApiCalls.getMigrationSubtask as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getMigrationSubtask with closed client', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.GetMigrationSubtaskRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getMigrationSubtask(request), expectedError); - }); - }); - - describe('listMigrationWorkflows', () => { - it('invokes listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationWorkflows(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes 
listMigrationWorkflows without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.innerApiCalls.listMigrationWorkflows = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationWorkflows( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationWorkflows = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationWorkflows(request), expectedError); - assert((client.innerApiCalls.listMigrationWorkflows as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationWorkflowsStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] 
= []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listMigrationWorkflowsStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationWorkflowsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationWorkflows, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationWorkflow()), - ]; - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - const iterable = client.listMigrationWorkflowsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], 
request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationWorkflows with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationWorkflowsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listMigrationWorkflows.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationWorkflowsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationWorkflow[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationWorkflows.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('listMigrationSubtasks', () => { - it('invokes listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(expectedResponse); - const [response] = await client.listMigrationSubtasks(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasks without error using callback', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new 
protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.innerApiCalls.listMigrationSubtasks = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listMigrationSubtasks( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listMigrationSubtasks = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listMigrationSubtasks(request), expectedError); - assert((client.innerApiCalls.listMigrationSubtasks as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listMigrationSubtasksStream without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes 
listMigrationSubtasksStream with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listMigrationSubtasks.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listMigrationSubtasksStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listMigrationSubtasks, request)); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks without error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.MigrationSubtask()), - ]; - client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - const iterable = client.listMigrationSubtasksAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listMigrationSubtasks with error', async () => { - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.migration.v2alpha.ListMigrationSubtasksRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - 
client.descriptors.page.listMigrationSubtasks.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listMigrationSubtasksAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.migration.v2alpha.IMigrationSubtask[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listMigrationSubtasks.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationSubtask', () => { - const fakePath = "/rendered/path/migrationSubtask"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - subtask: "subtaskValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationSubtaskPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationSubtaskPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationSubtaskPath', () => { - const result = client.migrationSubtaskPath("projectValue", "locationValue", "workflowValue", "subtaskValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationSubtaskPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationSubtaskName', () => { - const result = client.matchProjectFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationSubtaskName', () => { - const result = client.matchLocationFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, 
"locationValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationSubtaskName', () => { - const result = client.matchWorkflowFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchSubtaskFromMigrationSubtaskName', () => { - const result = client.matchSubtaskFromMigrationSubtaskName(fakePath); - assert.strictEqual(result, "subtaskValue"); - assert((client.pathTemplates.migrationSubtaskPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('migrationWorkflow', () => { - const fakePath = "/rendered/path/migrationWorkflow"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow: "workflowValue", - }; - const client = new migrationserviceModule.v2alpha.MigrationServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.migrationWorkflowPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.migrationWorkflowPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('migrationWorkflowPath', () => { - const result = client.migrationWorkflowPath("projectValue", "locationValue", "workflowValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.migrationWorkflowPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromMigrationWorkflowName', () => { - const result = client.matchProjectFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromMigrationWorkflowName', () => { - const result = client.matchLocationFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowFromMigrationWorkflowName', () => { - const result = client.matchWorkflowFromMigrationWorkflowName(fakePath); - assert.strictEqual(result, "workflowValue"); - assert((client.pathTemplates.migrationWorkflowPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v2alpha/tsconfig.json b/owl-bot-staging/v2alpha/tsconfig.json deleted file mode 100644 index c78f1c8..0000000 --- a/owl-bot-staging/v2alpha/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v2alpha/webpack.config.js b/owl-bot-staging/v2alpha/webpack.config.js deleted file mode 100644 index 9178c4b..0000000 --- a/owl-bot-staging/v2alpha/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'MigrationService', - filename: './migration-service.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -};