From 1b964ab788b2f0a9cd51e46af204d9732821a8b5 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 19 Jun 2020 20:20:10 -0700 Subject: [PATCH] chore: update templates (#27) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/1ba94aac-0bb7-4b94-aba5-0c00b038713e/targets Source-Link: https://github.com/googleapis/synthtool/commit/4e1d2cb79b02d7496b1452f91c518630c207145e Source-Link: https://github.com/googleapis/synthtool/commit/e99975b6b49827b8720f0a885e218dbdb67849ca Source-Link: https://github.com/googleapis/synthtool/commit/ffe10407ee2f261c799fb0d01bf32a8abc67ed1e Source-Link: https://github.com/googleapis/synthtool/commit/71b8a272549c06b5768d00fa48d3ae990e871bec PiperOrigin-RevId: 313460921 Source-Link: https://github.com/googleapis/googleapis/commit/c4e37010d74071851ff24121f522e802231ac86e PiperOrigin-RevId: 312689208 Source-Link: https://github.com/googleapis/googleapis/commit/dec3204175104cef49bf21d685d5517caaf0058f Source-Link: https://github.com/googleapis/synthtool/commit/d2364eb80b840a36136c8ce12f1c6efabcc9600e PiperOrigin-RevId: 312101156 Source-Link: https://github.com/googleapis/googleapis/commit/d1a9f02fd4fb263bae0383b4a5af0bbef33753d6 PiperOrigin-RevId: 312088359 Source-Link: https://github.com/googleapis/googleapis/commit/5a90d467aa65e7f038f87585e8fbb45d74475e7c Source-Link: https://github.com/googleapis/synthtool/commit/7482e79a82e353248769d819788adc1213e8c207 Source-Link: https://github.com/googleapis/synthtool/commit/09c48461232ce929c34386259eb59018ad2d8eef PiperOrigin-RevId: 309824146 Source-Link: https://github.com/googleapis/googleapis/commit/e0f9d9e1f9de890db765be46f45ca8490723e3eb --- .../.flake8 | 2 + .../.gitignore | 2 + .../.kokoro/publish-docs.sh | 2 - .../.kokoro/release.sh | 2 - .../.kokoro/samples/lint/common.cfg | 34 + .../.kokoro/samples/lint/continuous.cfg | 6 + .../.kokoro/samples/lint/periodic.cfg | 6 + .../.kokoro/samples/lint/presubmit.cfg | 6 + .../.kokoro/samples/python3.6/common.cfg | 34 + .../.kokoro/samples/python3.6/continuous.cfg | 7 + .../.kokoro/samples/python3.6/periodic.cfg | 6 + .../.kokoro/samples/python3.6/presubmit.cfg | 6 + .../.kokoro/samples/python3.7/common.cfg | 34 + .../.kokoro/samples/python3.7/continuous.cfg | 6 + .../.kokoro/samples/python3.7/periodic.cfg | 6 + .../.kokoro/samples/python3.7/presubmit.cfg | 6 + .../.kokoro/samples/python3.8/common.cfg | 34 + .../.kokoro/samples/python3.8/continuous.cfg | 6 + .../.kokoro/samples/python3.8/periodic.cfg | 6 + .../.kokoro/samples/python3.8/presubmit.cfg | 6 + .../.kokoro/test-samples.sh | 104 +++ .../MANIFEST.in | 3 + .../docs/conf.py | 5 +- .../docs/index.rst | 1 + .../docs/multiprocessing.rst | 7 + .../gapic/data_transfer_service_client.py | 508 ++++++----- .../data_transfer_service_client_config.py | 30 +- .../bigquery_datatransfer_v1/gapic/enums.py | 4 +- .../data_transfer_service_grpc_transport.py | 100 +-- .../proto/datatransfer_pb2.py | 793 ++++++++++-------- .../proto/datatransfer_pb2_grpc.py | 108 +-- .../proto/transfer_pb2.py | 265 ++++-- .../noxfile.py | 22 +- .../scripts/decrypt-secrets.sh | 33 + .../scripts/readme-gen/readme_gen.py | 66 ++ .../readme-gen/templates/README.tmpl.rst | 87 ++ .../readme-gen/templates/auth.tmpl.rst | 9 + .../templates/auth_api_key.tmpl.rst | 14 + .../templates/install_deps.tmpl.rst | 29 + .../templates/install_portaudio.tmpl.rst | 35 + .../synth.metadata | 18 +- .../synth.py | 3 + .../testing/.gitignore | 3 + .../test_data_transfer_service_client_v1.py 
| 204 ++--- 44 files changed, 1731 insertions(+), 937 deletions(-) create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/common.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/continuous.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/periodic.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/presubmit.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/common.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/continuous.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/periodic.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/presubmit.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/common.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/continuous.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/periodic.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/presubmit.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/common.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/continuous.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/periodic.cfg create mode 100644 packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/presubmit.cfg create mode 100755 packages/google-cloud-bigquery-datatransfer/.kokoro/test-samples.sh create mode 100644 packages/google-cloud-bigquery-datatransfer/docs/multiprocessing.rst create mode 100755 packages/google-cloud-bigquery-datatransfer/scripts/decrypt-secrets.sh create mode 100644 packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/readme_gen.py create mode 100644 packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/README.tmpl.rst create mode 100644 packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth.tmpl.rst create mode 100644 packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth_api_key.tmpl.rst create mode 100644 packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_deps.tmpl.rst create mode 100644 packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_portaudio.tmpl.rst create mode 100644 packages/google-cloud-bigquery-datatransfer/testing/.gitignore diff --git a/packages/google-cloud-bigquery-datatransfer/.flake8 b/packages/google-cloud-bigquery-datatransfer/.flake8 index 20fe9bda2ee4..ed9316381c9c 100644 --- a/packages/google-cloud-bigquery-datatransfer/.flake8 +++ b/packages/google-cloud-bigquery-datatransfer/.flake8 @@ -21,6 +21,8 @@ exclude = # Exclude generated code. **/proto/** **/gapic/** + **/services/** + **/types/** *_pb2.py # Standard linting exemptions. 
diff --git a/packages/google-cloud-bigquery-datatransfer/.gitignore b/packages/google-cloud-bigquery-datatransfer/.gitignore index 3fb06e09ce74..b87e1ed580d9 100644 --- a/packages/google-cloud-bigquery-datatransfer/.gitignore +++ b/packages/google-cloud-bigquery-datatransfer/.gitignore @@ -10,6 +10,7 @@ dist build eggs +.eggs parts bin var @@ -49,6 +50,7 @@ bigquery/docs/generated # Virtual environment env/ coverage.xml +sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/publish-docs.sh b/packages/google-cloud-bigquery-datatransfer/.kokoro/publish-docs.sh index ea468a90c510..3317cda32388 100755 --- a/packages/google-cloud-bigquery-datatransfer/.kokoro/publish-docs.sh +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/publish-docs.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Disable buffering, so that the logs stream through. diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/release.sh b/packages/google-cloud-bigquery-datatransfer/.kokoro/release.sh index 3979c4985b04..76c8ed2f9579 100755 --- a/packages/google-cloud-bigquery-datatransfer/.kokoro/release.sh +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/release.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Start the releasetool reporter diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/common.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/common.cfg new file mode 100644 index 000000000000..2072342e79af --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "lint" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/continuous.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/periodic.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/presubmit.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/lint/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/common.cfg new file mode 100644 index 000000000000..84052e6fa8a4 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.6" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/continuous.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/continuous.cfg new file mode 100644 index 000000000000..7218af1499e5 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/continuous.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/presubmit.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.6/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/common.cfg new file mode 100644 index 000000000000..147291a2ee59 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.7" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/continuous.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/presubmit.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.7/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/common.cfg new file mode 100644 index 000000000000..b447948a038a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.8" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-bigquery-datatransfer/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/continuous.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/presubmit.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/samples/python3.8/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/test-samples.sh b/packages/google-cloud-bigquery-datatransfer/.kokoro/test-samples.sh new file mode 100755 index 000000000000..e5eb712a11c2 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/test-samples.sh @@ -0,0 +1,104 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-bigquery-datatransfer + +# Run periodic samples tests at latest release +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + LATEST_RELEASE=$(git describe --abbrev=0 --tags) + git checkout $LATEST_RELEASE +fi + +# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets accessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the Build Cop Bot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/MANIFEST.in b/packages/google-cloud-bigquery-datatransfer/MANIFEST.in index 68855abc3f02..e9e29d12033d 100644 --- a/packages/google-cloud-bigquery-datatransfer/MANIFEST.in +++ b/packages/google-cloud-bigquery-datatransfer/MANIFEST.in @@ -20,3 +20,6 @@ recursive-include google *.json *.proto recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/docs/conf.py b/packages/google-cloud-bigquery-datatransfer/docs/conf.py index fa79a50ebc18..99bd11353f23 100644 --- a/packages/google-cloud-bigquery-datatransfer/docs/conf.py +++ b/packages/google-cloud-bigquery-datatransfer/docs/conf.py @@ -38,6 +38,7 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags @@ -49,10 +50,6 @@ # Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] diff --git a/packages/google-cloud-bigquery-datatransfer/docs/index.rst b/packages/google-cloud-bigquery-datatransfer/docs/index.rst index 942e2634213c..fa973e38cbba 100644 --- a/packages/google-cloud-bigquery-datatransfer/docs/index.rst +++ b/packages/google-cloud-bigquery-datatransfer/docs/index.rst @@ -1,5 +1,6 @@ .. include:: README.rst +.. include:: multiprocessing.rst API Reference ------------- diff --git a/packages/google-cloud-bigquery-datatransfer/docs/multiprocessing.rst b/packages/google-cloud-bigquery-datatransfer/docs/multiprocessing.rst new file mode 100644 index 000000000000..1cb29d4ca967 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpcio` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py index 3bd2b909e274..c0de8302d44a 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py @@ -304,6 +304,220 @@ def __init__( self._inner_api_calls = {} # Service calls + def delete_transfer_config( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a data transfer configuration, + including any associated transfer runs and logs. + + Example: + >>> from google.cloud import bigquery_datatransfer_v1 + >>> + >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() + >>> + >>> name = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') + >>> + >>> client.delete_transfer_config(name) + + Args: + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + # Wrap the transport method to add retry and timeout logic. + if "delete_transfer_config" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_transfer_config" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_transfer_config, + default_retry=self._method_configs["DeleteTransferConfig"].retry, + default_timeout=self._method_configs["DeleteTransferConfig"].timeout, + client_info=self._client_info, + ) + + request = datatransfer_pb2.DeleteTransferConfigRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + self._inner_api_calls["delete_transfer_config"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_transfer_run( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes the specified transfer run. + + Example: + >>> from google.cloud import bigquery_datatransfer_v1 + >>> + >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() + >>> + >>> name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]') + >>> + >>> client.delete_transfer_run(name) + + Args: + name (str): Required. The field will contain name of the resource requested, for + example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "delete_transfer_run" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_transfer_run" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_transfer_run, + default_retry=self._method_configs["DeleteTransferRun"].retry, + default_timeout=self._method_configs["DeleteTransferRun"].timeout, + client_info=self._client_info, + ) + + request = datatransfer_pb2.DeleteTransferRunRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + self._inner_api_calls["delete_transfer_run"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def check_valid_creds( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Returns true if valid credentials exist for the given data source and + requesting user. + Some data sources doesn't support service account, so we need to talk to + them on behalf of the end user. This API just checks whether we have OAuth + token for the particular user, which is a pre-requisite before user can + create a transfer config. + + Example: + >>> from google.cloud import bigquery_datatransfer_v1 + >>> + >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() + >>> + >>> name = client.project_data_source_path('[PROJECT]', '[DATA_SOURCE]') + >>> + >>> response = client.check_valid_creds(name) + + Args: + name (str): Required. The data source in the form: + ``projects/{project_id}/dataSources/{data_source_id}`` or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "check_valid_creds" not in self._inner_api_calls: + self._inner_api_calls[ + "check_valid_creds" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.check_valid_creds, + default_retry=self._method_configs["CheckValidCreds"].retry, + default_timeout=self._method_configs["CheckValidCreds"].timeout, + client_info=self._client_info, + ) + + request = datatransfer_pb2.CheckValidCredsRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["check_valid_creds"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + def get_data_source( self, name, @@ -412,7 +626,7 @@ def list_data_sources( Args: parent (str): Required. The BigQuery project id for which data sources should be returned. Must be in the form: ``projects/{project_id}`` or - \`projects/{project\_id}/locations/{location\_id} + \`projects/{project_id}/locations/{location_id} page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -511,36 +725,35 @@ def create_transfer_config( Args: parent (str): Required. The BigQuery project id where the transfer configuration should be created. Must be in the format - projects/{project\_id}/locations/{location\_id} or - projects/{project\_id}. If specified location and location of the - destination bigquery dataset do not match - the request will fail. + projects/{project_id}/locations/{location_id} or projects/{project_id}. + If specified location and location of the destination bigquery dataset + do not match - the request will fail. transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Required. Data transfer configuration to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` authorization_code (str): Optional OAuth2 authorization code to use with this transfer configuration. This is required if new credentials are needed, as - indicated by ``CheckValidCreds``. In order to obtain - authorization\_code, please make a request to - https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=&redirect\_uri= - - - client\_id should be OAuth client\_id of BigQuery DTS API for the - given data source returned by ListDataSources method. - - data\_source\_scopes are the scopes returned by ListDataSources - method. - - redirect\_uri is an optional parameter. If not specified, then + indicated by ``CheckValidCreds``. In order to obtain authorization_code, + please make a request to + https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= + + - client_id should be OAuth client_id of BigQuery DTS API for the given + data source returned by ListDataSources method. + - data_source_scopes are the scopes returned by ListDataSources method. + - redirect_uri is an optional parameter. If not specified, then authorization code is posted to the opener of authorization flow window. Otherwise it will be sent to the redirect uri. 
A special value of urn:ietf:wg:oauth:2.0:oob means that authorization code should be returned in the title bar of the browser, with the page text prompting the user to copy the code and paste it in the application. - version_info (str): Optional version info. If users want to find a very recent access token, - that is, immediately after approving access, users have to set the - version\_info claim in the token request. To obtain the version\_info, + version_info (str): Optional version info. If users want to find a very recent access + token, that is, immediately after approving access, users have to set + the version_info claim in the token request. To obtain the version_info, users must use the "none+gsession" response type, which returns a - version\_info back in the authorization response which be be put in a - JWT claim in the token request. + version_info back in the authorization response, which must be put in a JWT + claim in the token request. service_account_name (str): Optional service account name. If this field is set, transfer config will be created with this service account credentials. It requires that requesting user calling this API has permissions to act as this service account. @@ -639,28 +852,27 @@ def update_transfer_config( authorization_code (str): Optional OAuth2 authorization code to use with this transfer configuration. If it is provided, the transfer configuration will be associated with the authorizing user. In order to obtain - authorization\_code, please make a request to - https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=&redirect\_uri= - - - client\_id should be OAuth client\_id of BigQuery DTS API for the - given data source returned by ListDataSources method. - - data\_source\_scopes are the scopes returned by ListDataSources - method. - - redirect\_uri is an optional parameter. If not specified, then + authorization_code, please make a request to + https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= + + - client_id should be OAuth client_id of BigQuery DTS API for the given + data source returned by ListDataSources method. + - data_source_scopes are the scopes returned by ListDataSources method. + - redirect_uri is an optional parameter. If not specified, then authorization code is posted to the opener of authorization flow window. Otherwise it will be sent to the redirect uri. A special value of urn:ietf:wg:oauth:2.0:oob means that authorization code should be returned in the title bar of the browser, with the page text prompting the user to copy the code and paste it in the application. - version_info (str): Optional version info. If users want to find a very recent access token, - that is, immediately after approving access, users have to set the - version\_info claim in the token request. To obtain the version\_info, + version_info (str): Optional version info. If users want to find a very recent access + token, that is, immediately after approving access, users have to set + the version_info claim in the token request. To obtain the version_info, users must use the "none+gsession" response type, which returns a - version\_info back in the authorization response which be be put in a - JWT claim in the token request. + version_info back in the authorization response, which must be put in a JWT + claim in the token request. service_account_name (str): Optional service account name.
If this field is set and - "service\_account\_name" is set in update\_mask, transfer config will be + "service_account_name" is set in update_mask, transfer config will be updated to use this service account credentials. It requires that requesting user calling this API has permissions to act as this service account. @@ -718,75 +930,6 @@ def update_transfer_config( request, retry=retry, timeout=timeout, metadata=metadata ) - def delete_transfer_config( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a data transfer configuration, - including any associated transfer runs and logs. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') - >>> - >>> client.delete_transfer_config(name) - - Args: - name (str): Required. The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_transfer_config" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_transfer_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_transfer_config, - default_retry=self._method_configs["DeleteTransferConfig"].retry, - default_timeout=self._method_configs["DeleteTransferConfig"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.DeleteTransferConfigRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_transfer_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - def get_transfer_config( self, name, @@ -975,7 +1118,7 @@ def schedule_transfer_runs( metadata=None, ): """ - Creates transfer runs for a time range [start\_time, end\_time]. For + Creates transfer runs for a time range [start_time, end_time]. For each date - or whatever granularity the data source supports - in the range, one transfer run is created. Note that runs are created per UTC time in the time range. DEPRECATED: use StartManualTransferRuns instead. @@ -1069,10 +1212,10 @@ def start_manual_transfer_runs( metadata=None, ): """ - Start manual transfer runs to be executed now with schedule\_time equal - to current time. 
The transfer runs can be created for a time range where - the run\_time is between start\_time (inclusive) and end\_time - (exclusive), or for a specific run\_time. + Start manual transfer runs to be executed now with schedule_time + equal to current time. The transfer runs can be created for a time range + where the run_time is between start_time (inclusive) and end_time + (exclusive), or for a specific run_time. Example: >>> from google.cloud import bigquery_datatransfer_v1 @@ -1089,8 +1232,8 @@ def start_manual_transfer_runs( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.TimeRange` - requested_run_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Specific run\_time for a transfer run to be started. The - requested\_run\_time must not be in the future. + requested_run_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Specific run_time for a transfer run to be started. The + requested_run_time must not be in the future. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` @@ -1225,75 +1368,6 @@ def get_transfer_run( request, retry=retry, timeout=timeout, metadata=metadata ) - def delete_transfer_run( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the specified transfer run. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]') - >>> - >>> client.delete_transfer_run(name) - - Args: - name (str): Required. The field will contain name of the resource requested, for - example: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_transfer_run" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_transfer_run" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_transfer_run, - default_retry=self._method_configs["DeleteTransferRun"].retry, - default_timeout=self._method_configs["DeleteTransferRun"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.DeleteTransferRunRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_transfer_run"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - def list_transfer_runs( self, parent, @@ -1329,8 +1403,8 @@ def list_transfer_runs( ... pass Args: - parent (str): Required. Name of transfer configuration for which transfer runs should - be retrieved. Format of transfer configuration resource name is: + parent (str): Required. Name of transfer configuration for which transfer runs + should be retrieved. Format of transfer configuration resource name is: ``projects/{project_id}/transferConfigs/{config_id}`` or ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. states (list[~google.cloud.bigquery_datatransfer_v1.types.TransferState]): When specified, only transfer runs with requested states are returned. @@ -1511,79 +1585,3 @@ def list_transfer_logs( response_token_field="next_page_token", ) return iterator - - def check_valid_creds( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns true if valid credentials exist for the given data source and - requesting user. - Some data sources doesn't support service account, so we need to talk to - them on behalf of the end user. This API just checks whether we have OAuth - token for the particular user, which is a pre-requisite before user can - create a transfer config. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_data_source_path('[PROJECT]', '[DATA_SOURCE]') - >>> - >>> response = client.check_valid_creds(name) - - Args: - name (str): Required. The data source in the form: - ``projects/{project_id}/dataSources/{data_source_id}`` or - ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "check_valid_creds" not in self._inner_api_calls: - self._inner_api_calls[ - "check_valid_creds" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.check_valid_creds, - default_retry=self._method_configs["CheckValidCreds"].retry, - default_timeout=self._method_configs["CheckValidCreds"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.CheckValidCredsRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["check_valid_creds"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py index 10ac8aaa4e3f..2ce5e82ed24e 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py @@ -17,6 +17,21 @@ } }, "methods": { + "DeleteTransferConfig": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteTransferRun": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "CheckValidCreds": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, "GetDataSource": { "timeout_millis": 60000, "retry_codes_name": "idempotent", @@ -37,11 +52,6 @@ "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, - "DeleteTransferConfig": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, "GetTransferConfig": { "timeout_millis": 60000, "retry_codes_name": "idempotent", @@ -67,11 +77,6 @@ "retry_codes_name": "idempotent", "retry_params_name": "default", }, - "DeleteTransferRun": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, "ListTransferRuns": { "timeout_millis": 60000, "retry_codes_name": "idempotent", @@ -82,11 +87,6 @@ "retry_codes_name": "idempotent", "retry_params_name": "default", }, - "CheckValidCreds": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, }, } } diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py index b8dd52c8c1a0..fc16fdad65e7 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py @@ -21,8 +21,8 @@ class NullValue(enum.IntEnum): """ - ``NullValue`` is a singleton enumeration to represent the null value for - the ``Value`` type union. + ``NullValue`` is a singleton enumeration to represent the null value + for the ``Value`` type union. The JSON representation for ``NullValue`` is JSON ``null``. 
diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py index c480f976efb3..d13044c85bd1 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py @@ -115,6 +115,51 @@ def channel(self): """ return self._channel + @property + def delete_transfer_config(self): + """Return the gRPC stub for :meth:`DataTransferServiceClient.delete_transfer_config`. + + Deletes a data transfer configuration, + including any associated transfer runs and logs. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_transfer_service_stub"].DeleteTransferConfig + + @property + def delete_transfer_run(self): + """Return the gRPC stub for :meth:`DataTransferServiceClient.delete_transfer_run`. + + Deletes the specified transfer run. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_transfer_service_stub"].DeleteTransferRun + + @property + def check_valid_creds(self): + """Return the gRPC stub for :meth:`DataTransferServiceClient.check_valid_creds`. + + Returns true if valid credentials exist for the given data source and + requesting user. + Some data sources don't support service accounts, so we need to talk to + them on behalf of the end user. This API just checks whether we have an OAuth + token for the particular user, which is a prerequisite before the user can + create a transfer config. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_transfer_service_stub"].CheckValidCreds + @property def get_data_source(self): """Return the gRPC stub for :meth:`DataTransferServiceClient.get_data_source`. @@ -170,20 +215,6 @@ def update_transfer_config(self): """ return self._stubs["data_transfer_service_stub"].UpdateTransferConfig - @property - def delete_transfer_config(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.delete_transfer_config`. - - Deletes a data transfer configuration, - including any associated transfer runs and logs. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].DeleteTransferConfig - @property def get_transfer_config(self): """Return the gRPC stub for :meth:`DataTransferServiceClient.get_transfer_config`. @@ -214,7 +245,7 @@ def list_transfer_configs(self): def schedule_transfer_runs(self): """Return the gRPC stub for :meth:`DataTransferServiceClient.schedule_transfer_runs`. - Creates transfer runs for a time range [start\_time, end\_time]. For + Creates transfer runs for a time range [start_time, end_time]. For each date - or whatever granularity the data source supports - in the range, one transfer run is created. Note that runs are created per UTC time in the time range.
DEPRECATED: use StartManualTransferRuns instead. @@ -230,10 +261,10 @@ def schedule_transfer_runs(self): def start_manual_transfer_runs(self): """Return the gRPC stub for :meth:`DataTransferServiceClient.start_manual_transfer_runs`. - Start manual transfer runs to be executed now with schedule\_time equal - to current time. The transfer runs can be created for a time range where - the run\_time is between start\_time (inclusive) and end\_time - (exclusive), or for a specific run\_time. + Start manual transfer runs to be executed now with schedule_time + equal to current time. The transfer runs can be created for a time range + where the run_time is between start_time (inclusive) and end_time + (exclusive), or for a specific run_time. Returns: Callable: A callable which accepts the appropriate @@ -255,19 +286,6 @@ def get_transfer_run(self): """ return self._stubs["data_transfer_service_stub"].GetTransferRun - @property - def delete_transfer_run(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.delete_transfer_run`. - - Deletes the specified transfer run. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].DeleteTransferRun - @property def list_transfer_runs(self): """Return the gRPC stub for :meth:`DataTransferServiceClient.list_transfer_runs`. @@ -293,21 +311,3 @@ def list_transfer_logs(self): deserialized response object. """ return self._stubs["data_transfer_service_stub"].ListTransferLogs - - @property - def check_valid_creds(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.check_valid_creds`. - - Returns true if valid credentials exist for the given data source and - requesting user. - Some data sources doesn't support service account, so we need to talk to - them on behalf of the end user. This API just checks whether we have OAuth - token for the particular user, which is a pre-requisite before user can - create a transfer config. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].CheckValidCreds diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py index e5fa1508a14b..2a8005afbc61 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py @@ -1,10 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto +# source: google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -20,7 +17,7 @@ from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.bigquery_datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, + transfer_pb2 as google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2, ) from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 @@ -30,21 +27,18 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto", + name="google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto", package="google.cloud.bigquery.datatransfer.v1", syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.datatransfer.v1B\021DataTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1\352\002)Google::Cloud::Bigquery::DataTransfer::V1" - ), - serialized_pb=_b( - '\n>google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto"\x85\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 \x01(\x08"i\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06"\x9c\t\n\nDataSource\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 
\x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 \x01(\x0b\x32\x19.google.protobuf.Duration"\x8a\x01\n\x11\x41uthorizationType\x12"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02\x12\x15\n\x11\x46IRST_PARTY_OAUTH\x10\x03"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02:\xa5\x01\xea\x41\xa1\x01\n.bigquerydatatransfer.googleapis.com/DataSource\x12,projects/{project}/dataSources/{data_source}\x12\x41projects/{project}/locations/{location}/dataSources/{data_source}"\\\n\x14GetDataSourceRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"\x87\x01\n\x16ListDataSourcesRequest\x12\x46\n\x06parent\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\x12.bigquerydatatransfer.googleapis.com/DataSource\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x80\x01\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\x8e\x02\n\x1b\x43reateTransferConfigRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12S\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t"\xf8\x01\n\x1bUpdateTransferConfigRequest\x12S\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t"d\n\x18GetTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"g\n\x1b\x44\x65leteTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"V\n\x15GetTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run"Y\n\x18\x44\x65leteTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run"\xa8\x01\n\x1aListTransferConfigsRequest\x12J\n\x06parent\x18\x01 
\x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x91\x01\n\x1bListTransferConfigsResponse\x12T\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xde\x02\n\x17ListTransferRunsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\x12\'bigquerydatatransfer.googleapis.com/Run\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 \x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01"\x88\x01\n\x18ListTransferRunsResponse\x12N\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRunB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xe0\x01\n\x17ListTransferLogsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"\x90\x01\n\x18ListTransferLogsResponse\x12V\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessageB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"^\n\x16\x43heckValidCredsRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08"\xd1\x01\n\x1bScheduleTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x33\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x31\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"\x87\x03\n\x1eStartManualTransferRunsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12o\n\x14requested_time_range\x18\x03 \x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 
\x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun2\xc3\x1f\n\x13\x44\x61taTransferService\x12\xe6\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource"e\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\xda\x41\x04name\x12\xf9\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse"g\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\xda\x41\x06parent\x12\xb6\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xa2\x01\x82\xd3\xe4\x93\x02\x82\x01"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\xda\x41\x16parent,transfer_config\x12\xdb\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xc7\x01\x82\xd3\xe4\x93\x02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\xda\x41\x1btransfer_config,update_mask\x12\xe1\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty"m\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\xfa\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"m\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\x8d\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse"o\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\xda\x41\x06parent\x12\xcd\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse"\xab\x01\x88\x02\x01\x82\xd3\xe4\x93\x02\x84\x01"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\xda\x41\x1aparent,start_time,end_time\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xff\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"{\x82\xd3\xe4\x93\x02n\x12:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\xe9\x01\n\x11\x44\x65leteTran
sferRun\x12?.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty"{\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\x92\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse"}\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\xda\x41\x06parent\x12\xb2\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse"\x9c\x01\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\xda\x41\x06parent\x12\x9e\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse"\x8b\x01\x82\xd3\xe4\x93\x02~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\xda\x41\x04name\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8f\x02\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3' - ), + serialized_options=b"\n)com.google.cloud.bigquery.datatransfer.v1B\021DataTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1\352\002)Google::Cloud::Bigquery::DataTransfer::V1", + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n>google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a:google/cloud/bigquery_datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto"\x85\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 
\x01(\x08"i\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06"\x9c\t\n\nDataSource\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 \x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 \x01(\x0b\x32\x19.google.protobuf.Duration"\x8a\x01\n\x11\x41uthorizationType\x12"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02\x12\x15\n\x11\x46IRST_PARTY_OAUTH\x10\x03"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02:\xa5\x01\xea\x41\xa1\x01\n.bigquerydatatransfer.googleapis.com/DataSource\x12,projects/{project}/dataSources/{data_source}\x12\x41projects/{project}/locations/{location}/dataSources/{data_source}"\\\n\x14GetDataSourceRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"\x87\x01\n\x16ListDataSourcesRequest\x12\x46\n\x06parent\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\x12.bigquerydatatransfer.googleapis.com/DataSource\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x80\x01\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\x8e\x02\n\x1b\x43reateTransferConfigRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12S\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t"\xf8\x01\n\x1bUpdateTransferConfigRequest\x12S\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t"d\n\x18GetTransferConfigRequest\x12H\n\x04name\x18\x01 
\x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"g\n\x1b\x44\x65leteTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"V\n\x15GetTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run"Y\n\x18\x44\x65leteTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run"\xa8\x01\n\x1aListTransferConfigsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x91\x01\n\x1bListTransferConfigsResponse\x12T\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xde\x02\n\x17ListTransferRunsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\x12\'bigquerydatatransfer.googleapis.com/Run\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 \x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01"\x88\x01\n\x18ListTransferRunsResponse\x12N\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRunB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xe0\x01\n\x17ListTransferLogsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"\x90\x01\n\x18ListTransferLogsResponse\x12V\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessageB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"^\n\x16\x43heckValidCredsRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08"\xd1\x01\n\x1bScheduleTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x33\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x31\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"\x87\x03\n\x1eStartManualTransferRunsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12o\n\x14requested_time_range\x18\x03 \x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun2\xc3\x1f\n\x13\x44\x61taTransferService\x12\xe6\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource"e\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\xda\x41\x04name\x12\xf9\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse"g\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\xda\x41\x06parent\x12\xb6\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xa2\x01\x82\xd3\xe4\x93\x02\x82\x01"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\xda\x41\x16parent,transfer_config\x12\xdb\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xc7\x01\x82\xd3\xe4\x93\x02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\xda\x41\x1btransfer_config,update_mask\x12\xe1\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty"m\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\xfa\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"m\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\x8d\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse"o\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\xda\x41\x06parent\x12\xcd\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse"\xab\x01\x88\x02\x01\x82\xd3\xe4\x93\x02\x84\x01"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\xda\x41\x1aparent,start_time,end_time\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xff\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"{\x82\xd3\xe4\x93\x02n\x12:/v1/{name=projects/*/locations/*/transfe
rConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\xe9\x01\n\x11\x44\x65leteTransferRun\x12?.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty"{\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\x92\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse"}\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\xda\x41\x06parent\x12\xb2\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse"\x9c\x01\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\xda\x41\x06parent\x12\x9e\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse"\x8b\x01\x82\xd3\xe4\x93\x02~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\xda\x41\x04name\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8f\x02\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3', dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.DESCRIPTOR, + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, @@ -59,6 +53,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="TYPE_UNSPECIFIED", @@ -66,24 +61,55 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="STRING", index=1, number=1, serialized_options=None, type=None + name="STRING", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="INTEGER", index=2, number=2, serialized_options=None, type=None + name="INTEGER", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="DOUBLE", index=3, number=3, serialized_options=None, type=None + name="DOUBLE", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="BOOLEAN", index=4, number=4, serialized_options=None, type=None + 
name="BOOLEAN", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="RECORD", index=5, number=5, serialized_options=None, type=None + name="RECORD", + index=5, + number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="PLUS_PAGE", index=6, number=6, serialized_options=None, type=None + name="PLUS_PAGE", + index=6, + number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, @@ -98,6 +124,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="AUTHORIZATION_TYPE_UNSPECIFIED", @@ -105,6 +132,7 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="AUTHORIZATION_CODE", @@ -112,6 +140,7 @@ number=1, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="GOOGLE_PLUS_AUTHORIZATION_CODE", @@ -119,6 +148,7 @@ number=2, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="FIRST_PARTY_OAUTH", @@ -126,6 +156,7 @@ number=3, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, @@ -140,6 +171,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="DATA_REFRESH_TYPE_UNSPECIFIED", @@ -147,9 +179,15 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="SLIDING_WINDOW", index=1, number=1, serialized_options=None, type=None + name="SLIDING_WINDOW", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="CUSTOM_SLIDING_WINDOW", @@ -157,6 +195,7 @@ number=2, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, @@ -171,6 +210,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="RUN_ATTEMPT_UNSPECIFIED", @@ -178,9 +218,15 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="LATEST", index=1, number=1, serialized_options=None, type=None + name="LATEST", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, @@ -197,6 +243,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="param_id", @@ -207,7 +254,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -215,6 +262,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), 
_descriptor.FieldDescriptor( name="display_name", @@ -225,7 +273,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -233,6 +281,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="description", @@ -243,7 +292,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -251,6 +300,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="type", @@ -269,6 +319,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="required", @@ -287,6 +338,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="repeated", @@ -305,6 +357,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="validation_regex", @@ -315,7 +368,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -323,6 +376,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="allowed_values", @@ -341,6 +395,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="min_value", @@ -359,6 +414,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="max_value", @@ -377,6 +433,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="fields", @@ -395,6 +452,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="validation_description", @@ -405,7 +463,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -413,6 +471,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="validation_help_url", @@ -423,7 +482,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -431,6 +490,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="immutable", @@ -449,6 +509,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="recurse", @@ -467,6 +528,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="deprecated", @@ -485,6 +547,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -506,6 +569,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -516,14 +580,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="data_source_id", @@ -534,7 +599,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -542,6 +607,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="display_name", @@ -552,7 +618,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -560,6 +626,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="description", @@ -570,7 +637,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -578,6 +645,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="client_id", @@ -588,7 +656,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -596,6 +664,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="scopes", @@ -614,6 +683,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="transfer_type", @@ -630,8 +700,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\030\001"), + serialized_options=b"\030\001", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="supports_multiple_transfers", @@ -648,8 +719,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\030\001"), + serialized_options=b"\030\001", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="update_deadline_seconds", @@ -668,6 +740,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="default_schedule", @@ -678,7 +751,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -686,6 +759,7 @@ 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="supports_custom_schedule", @@ -704,6 +778,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="parameters", @@ -722,6 +797,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="help_url", @@ -732,7 +808,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -740,6 +816,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="authorization_type", @@ -758,6 +835,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="data_refresh_type", @@ -776,6 +854,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="default_data_refresh_window_days", @@ -794,6 +873,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="manual_runs_disabled", @@ -812,6 +892,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="minimum_schedule_interval", @@ -830,14 +911,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], enum_types=[_DATASOURCE_AUTHORIZATIONTYPE, _DATASOURCE_DATAREFRESHTYPE], - serialized_options=_b( - "\352A\241\001\n.bigquerydatatransfer.googleapis.com/DataSource\022,projects/{project}/dataSources/{data_source}\022Aprojects/{project}/locations/{location}/dataSources/{data_source}" - ), + serialized_options=b"\352A\241\001\n.bigquerydatatransfer.googleapis.com/DataSource\022,projects/{project}/dataSources/{data_source}\022Aprojects/{project}/locations/{location}/dataSources/{data_source}", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -853,6 +933,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -863,16 +944,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource" - ), + serialized_options=b"\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -894,6 +974,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="parent", @@ -904,16 +985,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=_b( - "\340A\002\372A0\022.bigquerydatatransfer.googleapis.com/DataSource" - ), + serialized_options=b"\340A\002\372A0\022.bigquerydatatransfer.googleapis.com/DataSource", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -924,7 +1004,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -932,6 +1012,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -950,6 +1031,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -971,6 +1053,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="data_sources", @@ -989,6 +1072,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_page_token", @@ -999,14 +1083,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1028,6 +1113,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="parent", @@ -1038,16 +1124,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A4\0222bigquerydatatransfer.googleapis.com/TransferConfig" - ), + serialized_options=b"\340A\002\372A4\0222bigquerydatatransfer.googleapis.com/TransferConfig", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="transfer_config", @@ -1064,8 +1149,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="authorization_code", @@ -1076,7 +1162,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1084,6 +1170,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="version_info", @@ -1094,7 +1181,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1102,6 +1189,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="service_account_name", @@ -1112,7 +1200,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + 
default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1120,6 +1208,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1141,6 +1230,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="transfer_config", @@ -1157,8 +1247,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="authorization_code", @@ -1169,7 +1260,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1177,6 +1268,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="update_mask", @@ -1193,8 +1285,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="version_info", @@ -1205,7 +1298,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1213,6 +1306,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="service_account_name", @@ -1223,7 +1317,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1231,6 +1325,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1252,6 +1347,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -1262,16 +1358,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" - ), + serialized_options=b"\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -1293,6 +1388,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -1303,16 +1399,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" - ), + serialized_options=b"\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -1334,6 +1429,7 @@ 
filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -1344,16 +1440,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run" - ), + serialized_options=b"\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -1375,6 +1470,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -1385,16 +1481,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run" - ), + serialized_options=b"\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -1416,6 +1511,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="parent", @@ -1426,16 +1522,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A4\0222bigquerydatatransfer.googleapis.com/TransferConfig" - ), + serialized_options=b"\340A\002\372A4\0222bigquerydatatransfer.googleapis.com/TransferConfig", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="data_source_ids", @@ -1454,6 +1549,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -1464,7 +1560,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1472,6 +1568,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -1490,6 +1587,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1511,6 +1609,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="transfer_configs", @@ -1527,8 +1626,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_page_token", @@ -1539,14 +1639,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + 
serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1568,6 +1669,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="parent", @@ -1578,16 +1680,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A)\022'bigquerydatatransfer.googleapis.com/Run" - ), + serialized_options=b"\340A\002\372A)\022'bigquerydatatransfer.googleapis.com/Run", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="states", @@ -1606,6 +1707,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -1616,7 +1718,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1624,6 +1726,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -1642,6 +1745,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="run_attempt", @@ -1660,6 +1764,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1681,6 +1786,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="transfer_runs", @@ -1697,8 +1803,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_page_token", @@ -1709,14 +1816,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1738,6 +1846,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="parent", @@ -1748,16 +1857,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run" - ), + serialized_options=b"\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -1768,7 +1876,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1776,6 +1884,7 @@ 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -1794,6 +1903,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="message_types", @@ -1812,6 +1922,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1833,6 +1944,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="transfer_messages", @@ -1849,8 +1961,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_page_token", @@ -1861,14 +1974,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1890,6 +2004,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -1900,16 +2015,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource" - ), + serialized_options=b"\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -1931,6 +2045,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="has_valid_creds", @@ -1949,6 +2064,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -1970,6 +2086,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="parent", @@ -1980,16 +2097,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" - ), + serialized_options=b"\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="start_time", @@ -2006,8 +2122,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="end_time", @@ -2024,8 +2141,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + 
serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2047,6 +2165,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="runs", @@ -2065,6 +2184,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -2086,6 +2206,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="start_time", @@ -2104,6 +2225,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="end_time", @@ -2122,6 +2244,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2142,6 +2265,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="parent", @@ -2152,16 +2276,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" - ), + serialized_options=b"\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="requested_time_range", @@ -2180,6 +2303,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="requested_run_time", @@ -2198,6 +2322,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2213,6 +2338,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.time", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], ) ], @@ -2227,6 +2353,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="runs", @@ -2245,6 +2372,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -2271,7 +2399,7 @@ _DATASOURCE.fields_by_name[ "transfer_type" ].enum_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERTYPE + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERTYPE ) _DATASOURCE.fields_by_name["parameters"].message_type = _DATASOURCEPARAMETER _DATASOURCE.fields_by_name[ @@ -2287,12 +2415,12 @@ _CREATETRANSFERCONFIGREQUEST.fields_by_name[ "transfer_config" ].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG ) _UPDATETRANSFERCONFIGREQUEST.fields_by_name[ "transfer_config" ].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG ) 
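# Note: protoc derives each cross-module alias from the dependency's Python
# module path by replacing every "." with "_dot_" and doubling any existing
# "_", so google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2 is spelled
# google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2;
# the pre-rename aliases above encoded the old dotted package name,
# google.cloud.bigquery.datatransfer_v1. The
# create_key=_descriptor._internal_create_key arguments threaded through the
# descriptors are, as far as we can tell, the construction guard introduced in
# protobuf 3.12 to keep application code from instantiating descriptors
# directly.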
_UPDATETRANSFERCONFIGREQUEST.fields_by_name[ "update_mask" @@ -2300,12 +2428,12 @@ _LISTTRANSFERCONFIGSRESPONSE.fields_by_name[ "transfer_configs" ].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG ) _LISTTRANSFERRUNSREQUEST.fields_by_name[ "states" ].enum_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERSTATE + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERSTATE ) _LISTTRANSFERRUNSREQUEST.fields_by_name[ "run_attempt" @@ -2314,17 +2442,17 @@ _LISTTRANSFERRUNSRESPONSE.fields_by_name[ "transfer_runs" ].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN ) _LISTTRANSFERLOGSREQUEST.fields_by_name[ "message_types" ].enum_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE_MESSAGESEVERITY + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE_MESSAGESEVERITY ) _LISTTRANSFERLOGSRESPONSE.fields_by_name[ "transfer_messages" ].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE ) _SCHEDULETRANSFERRUNSREQUEST.fields_by_name[ "start_time" @@ -2335,7 +2463,7 @@ _SCHEDULETRANSFERRUNSRESPONSE.fields_by_name[ "runs" ].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN ) _STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE.fields_by_name[ "start_time" @@ -2367,7 +2495,7 @@ _STARTMANUALTRANSFERRUNSRESPONSE.fields_by_name[ "runs" ].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN + google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN ) DESCRIPTOR.message_types_by_name["DataSourceParameter"] = _DATASOURCEPARAMETER DESCRIPTOR.message_types_by_name["DataSource"] = _DATASOURCE @@ -2415,10 +2543,10 @@ DataSourceParameter = _reflection.GeneratedProtocolMessageType( "DataSourceParameter", (_message.Message,), - dict( - DESCRIPTOR=_DATASOURCEPARAMETER, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""Represents a data source parameter with validation rules, so that + { + "DESCRIPTOR": _DATASOURCEPARAMETER, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """Represents a data source parameter with validation rules, so that parameters can be rendered in the UI. These parameters are given to us by supported data sources, and include all needed information for rendering and validation. Thus, whoever uses this api can decide to @@ -2463,17 +2591,17 @@ not be visible to users. 
""", # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSourceParameter) - ), + }, ) _sym_db.RegisterMessage(DataSourceParameter) DataSource = _reflection.GeneratedProtocolMessageType( "DataSource", (_message.Message,), - dict( - DESCRIPTOR=_DATASOURCE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""Represents data source metadata. Metadata is sufficient to render UI + { + "DESCRIPTOR": _DATASOURCE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """Represents data source metadata. Metadata is sufficient to render UI and request proper OAuth tokens. Attributes: name: @@ -2516,9 +2644,9 @@ Indicates the type of authorization. data_refresh_type: Specifies whether the data source supports automatic data - refresh for the past few days, and how it's supported. For + refresh for the past few days, and how it’s supported. For some data sources, data might not be complete until a few days - later, so it's useful to refresh data automatically. + later, so it’s useful to refresh data automatically. default_data_refresh_window_days: Default data refresh window on days. Only meaningful when ``data_refresh_type`` = ``SLIDING_WINDOW``. @@ -2529,17 +2657,17 @@ The minimum interval for scheduler to schedule runs. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSource) - ), + }, ) _sym_db.RegisterMessage(DataSource) GetDataSourceRequest = _reflection.GeneratedProtocolMessageType( "GetDataSourceRequest", (_message.Message,), - dict( - DESCRIPTOR=_GETDATASOURCEREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to get data source info. + { + "DESCRIPTOR": _GETDATASOURCEREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to get data source info. Attributes: name: Required. The field will contain name of the resource @@ -2549,24 +2677,24 @@ source_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest) - ), + }, ) _sym_db.RegisterMessage(GetDataSourceRequest) ListDataSourcesRequest = _reflection.GeneratedProtocolMessageType( "ListDataSourcesRequest", (_message.Message,), - dict( - DESCRIPTOR=_LISTDATASOURCESREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""Request to list supported data sources and their data transfer + { + "DESCRIPTOR": _LISTDATASOURCESREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """Request to list supported data sources and their data transfer settings. Attributes: parent: Required. The BigQuery project id for which data sources should be returned. Must be in the form: ``projects/{project_id}`` or - \`projects/{project\_id}/locations/{location\_id} + \`projects/{project_id}/locations/{location_id} page_token: Pagination token, which can be used to request a specific page of ``ListDataSourcesRequest`` list results. For multiple-page @@ -2578,17 +2706,17 @@ results. 
""", # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest) - ), + }, ) _sym_db.RegisterMessage(ListDataSourcesRequest) ListDataSourcesResponse = _reflection.GeneratedProtocolMessageType( "ListDataSourcesResponse", (_message.Message,), - dict( - DESCRIPTOR=_LISTDATASOURCESRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""Returns list of supported data sources and their metadata. + { + "DESCRIPTOR": _LISTDATASOURCESRESPONSE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """Returns list of supported data sources and their metadata. Attributes: data_sources: List of supported data sources and their transfer settings. @@ -2599,17 +2727,17 @@ of list results. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse) - ), + }, ) _sym_db.RegisterMessage(ListDataSourcesResponse) CreateTransferConfigRequest = _reflection.GeneratedProtocolMessageType( "CreateTransferConfigRequest", (_message.Message,), - dict( - DESCRIPTOR=_CREATETRANSFERCONFIGREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to create a data transfer configuration. If new credentials + { + "DESCRIPTOR": _CREATETRANSFERCONFIGREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to create a data transfer configuration. If new credentials are needed for this transfer configuration, an authorization code must be provided. If an authorization code is provided, the transfer configuration will be associated with the user id corresponding to the @@ -2619,8 +2747,8 @@ parent: Required. The BigQuery project id where the transfer configuration should be created. Must be in the format - projects/{project\_id}/locations/{location\_id} or - projects/{project\_id}. If specified location and location of + projects/{project_id}/locations/{location_id} or + projects/{project_id}. If specified location and location of the destination bigquery dataset do not match - the request will fail. transfer_config: @@ -2629,12 +2757,12 @@ Optional OAuth2 authorization code to use with this transfer configuration. This is required if new credentials are needed, as indicated by ``CheckValidCreds``. In order to obtain - authorization\_code, please make a request to https://www.gsta - tic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=&re - direct\_uri= - client\_id should be OAuth client\_id of - BigQuery DTS API for the given data source returned by - ListDataSources method. - data\_source\_scopes are the scopes - returned by ListDataSources method. - redirect\_uri is an + authorization_code, please make a request to https://www.gstat + ic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redi + rect_uri= - client_id should be OAuth client_id of BigQuery + DTS API for the given data source returned by + ListDataSources method. - data_source_scopes are the scopes + returned by ListDataSources method. - redirect_uri is an optional parameter. If not specified, then authorization code is posted to the opener of authorization flow window. Otherwise it will be sent to the redirect uri. A special @@ -2645,11 +2773,11 @@ version_info: Optional version info. If users want to find a very recent access token, that is, immediately after approving access, - users have to set the version\_info claim in the token - request. 
To obtain the version\_info, users must use the - "none+gsession" response type. which be return a version\_info - back in the authorization response which be be put in a JWT - claim in the token request. + users have to set the version_info claim in the token request. + To obtain the version_info, users must use the “none+gsession” + response type. which be return a version_info back in the + authorization response which be be put in a JWT claim in the + token request. service_account_name: Optional service account name. If this field is set, transfer config will be created with this service account credentials. @@ -2657,17 +2785,17 @@ permissions to act as this service account. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest) - ), + }, ) _sym_db.RegisterMessage(CreateTransferConfigRequest) UpdateTransferConfigRequest = _reflection.GeneratedProtocolMessageType( "UpdateTransferConfigRequest", (_message.Message,), - dict( - DESCRIPTOR=_UPDATETRANSFERCONFIGREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to update a transfer configuration. To update the user id of + { + "DESCRIPTOR": _UPDATETRANSFERCONFIGREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to update a transfer configuration. To update the user id of the transfer configuration, an authorization code needs to be provided. Attributes: @@ -2677,12 +2805,12 @@ Optional OAuth2 authorization code to use with this transfer configuration. If it is provided, the transfer configuration will be associated with the authorizing user. In order to - obtain authorization\_code, please make a request to https://w - ww.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&sc - ope=&redirect\_uri= - client\_id should be OAuth client\_id - of BigQuery DTS API for the given data source returned by - ListDataSources method. - data\_source\_scopes are the scopes - returned by ListDataSources method. - redirect\_uri is an + obtain authorization_code, please make a request to https://ww + w.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scop + e=&redirect_uri= - client_id should be OAuth client_id of + BigQuery DTS API for the given data source returned by + ListDataSources method. - data_source_scopes are the scopes + returned by ListDataSources method. - redirect_uri is an optional parameter. If not specified, then authorization code is posted to the opener of authorization flow window. Otherwise it will be sent to the redirect uri. A special @@ -2696,30 +2824,30 @@ version_info: Optional version info. If users want to find a very recent access token, that is, immediately after approving access, - users have to set the version\_info claim in the token - request. To obtain the version\_info, users must use the - "none+gsession" response type. which be return a version\_info - back in the authorization response which be be put in a JWT - claim in the token request. + users have to set the version_info claim in the token request. + To obtain the version_info, users must use the “none+gsession” + response type. which be return a version_info back in the + authorization response which be be put in a JWT claim in the + token request. service_account_name: Optional service account name. If this field is set and - "service\_account\_name" is set in update\_mask, transfer - config will be updated to use this service account - credentials. 
It requires that requesting user calling this API - has permissions to act as this service account. + “service_account_name” is set in update_mask, transfer config + will be updated to use this service account credentials. It + requires that requesting user calling this API has permissions + to act as this service account. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest) - ), + }, ) _sym_db.RegisterMessage(UpdateTransferConfigRequest) GetTransferConfigRequest = _reflection.GeneratedProtocolMessageType( "GetTransferConfigRequest", (_message.Message,), - dict( - DESCRIPTOR=_GETTRANSFERCONFIGREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to get data transfer information. + { + "DESCRIPTOR": _GETTRANSFERCONFIGREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to get data transfer information. Attributes: name: Required. The field will contain name of the resource @@ -2729,17 +2857,17 @@ nfig_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest) - ), + }, ) _sym_db.RegisterMessage(GetTransferConfigRequest) DeleteTransferConfigRequest = _reflection.GeneratedProtocolMessageType( "DeleteTransferConfigRequest", (_message.Message,), - dict( - DESCRIPTOR=_DELETETRANSFERCONFIGREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to delete data transfer information. All associated transfer + { + "DESCRIPTOR": _DELETETRANSFERCONFIGREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to delete data transfer information. All associated transfer runs and log messages will be deleted as well. Attributes: name: @@ -2750,17 +2878,17 @@ nfig_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest) - ), + }, ) _sym_db.RegisterMessage(DeleteTransferConfigRequest) GetTransferRunRequest = _reflection.GeneratedProtocolMessageType( "GetTransferRunRequest", (_message.Message,), - dict( - DESCRIPTOR=_GETTRANSFERRUNREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to get data transfer run information. + { + "DESCRIPTOR": _GETTRANSFERRUNREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to get data transfer run information. Attributes: name: Required. The field will contain name of the resource @@ -2769,17 +2897,17 @@ ions/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest) - ), + }, ) _sym_db.RegisterMessage(GetTransferRunRequest) DeleteTransferRunRequest = _reflection.GeneratedProtocolMessageType( "DeleteTransferRunRequest", (_message.Message,), - dict( - DESCRIPTOR=_DELETETRANSFERRUNREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to delete data transfer run information. + { + "DESCRIPTOR": _DELETETRANSFERRUNREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to delete data transfer run information. Attributes: name: Required. 
The field will contain name of the resource @@ -2788,17 +2916,17 @@ ions/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest) - ), + }, ) _sym_db.RegisterMessage(DeleteTransferRunRequest) ListTransferConfigsRequest = _reflection.GeneratedProtocolMessageType( "ListTransferConfigsRequest", (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERCONFIGSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to list data transfers configured for a BigQuery project. + { + "DESCRIPTOR": _LISTTRANSFERCONFIGSREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to list data transfers configured for a BigQuery project. Attributes: parent: Required. The BigQuery project id for which data sources @@ -2818,17 +2946,17 @@ results. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest) - ), + }, ) _sym_db.RegisterMessage(ListTransferConfigsRequest) ListTransferConfigsResponse = _reflection.GeneratedProtocolMessageType( "ListTransferConfigsResponse", (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERCONFIGSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""The returned list of pipelines in the project. + { + "DESCRIPTOR": _LISTTRANSFERCONFIGSRESPONSE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """The returned list of pipelines in the project. Attributes: transfer_configs: Output only. The stored pipeline transfer configurations. @@ -2839,17 +2967,17 @@ page of list results. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse) - ), + }, ) _sym_db.RegisterMessage(ListTransferConfigsResponse) ListTransferRunsRequest = _reflection.GeneratedProtocolMessageType( "ListTransferRunsRequest", (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERRUNSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to list data transfer runs. UI can use this method to + { + "DESCRIPTOR": _LISTTRANSFERRUNSREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to list data transfer runs. UI can use this method to show/filter specific data transfer runs. The data source can use this method to request all scheduled transfer runs. Attributes: @@ -2876,17 +3004,17 @@ Indicates how run attempts are to be pulled. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest) - ), + }, ) _sym_db.RegisterMessage(ListTransferRunsRequest) ListTransferRunsResponse = _reflection.GeneratedProtocolMessageType( "ListTransferRunsResponse", (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERRUNSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""The returned list of pipelines in the project. + { + "DESCRIPTOR": _LISTTRANSFERRUNSRESPONSE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """The returned list of pipelines in the project. Attributes: transfer_runs: Output only. The stored pipeline transfer runs. @@ -2897,17 +3025,17 @@ page of list results. 
""", # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse) - ), + }, ) _sym_db.RegisterMessage(ListTransferRunsResponse) ListTransferLogsRequest = _reflection.GeneratedProtocolMessageType( "ListTransferLogsRequest", (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERLOGSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to get user facing log messages associated with data + { + "DESCRIPTOR": _LISTTRANSFERLOGSREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to get user facing log messages associated with data transfer run. Attributes: parent: @@ -2929,17 +3057,17 @@ ERROR messages are returned. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest) - ), + }, ) _sym_db.RegisterMessage(ListTransferLogsRequest) ListTransferLogsResponse = _reflection.GeneratedProtocolMessageType( "ListTransferLogsResponse", (_message.Message,), - dict( - DESCRIPTOR=_LISTTRANSFERLOGSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""The returned list transfer run messages. + { + "DESCRIPTOR": _LISTTRANSFERLOGSRESPONSE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """The returned list transfer run messages. Attributes: transfer_messages: Output only. The stored pipeline transfer messages. @@ -2950,17 +3078,17 @@ page of list results. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse) - ), + }, ) _sym_db.RegisterMessage(ListTransferLogsResponse) CheckValidCredsRequest = _reflection.GeneratedProtocolMessageType( "CheckValidCredsRequest", (_message.Message,), - dict( - DESCRIPTOR=_CHECKVALIDCREDSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to determine whether the user has valid credentials. This + { + "DESCRIPTOR": _CHECKVALIDCREDSREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to determine whether the user has valid credentials. This method is used to limit the number of OAuth popups in the user interface. The user id is inferred from the API call context. If the data source has the Google+ authorization type, this method returns @@ -2974,33 +3102,33 @@ source_id}``. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest) - ), + }, ) _sym_db.RegisterMessage(CheckValidCredsRequest) CheckValidCredsResponse = _reflection.GeneratedProtocolMessageType( "CheckValidCredsResponse", (_message.Message,), - dict( - DESCRIPTOR=_CHECKVALIDCREDSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A response indicating whether the credentials exist and are valid. + { + "DESCRIPTOR": _CHECKVALIDCREDSRESPONSE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A response indicating whether the credentials exist and are valid. Attributes: has_valid_creds: If set to ``true``, the credentials exist and are valid. 
""", # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse) - ), + }, ) _sym_db.RegisterMessage(CheckValidCredsResponse) ScheduleTransferRunsRequest = _reflection.GeneratedProtocolMessageType( "ScheduleTransferRunsRequest", (_message.Message,), - dict( - DESCRIPTOR=_SCHEDULETRANSFERRUNSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to schedule transfer runs for a time range. + { + "DESCRIPTOR": _SCHEDULETRANSFERRUNSREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to schedule transfer runs for a time range. Attributes: parent: Required. Transfer configuration name in the form: @@ -3015,57 +3143,57 @@ ``"2017-05-30T00:00:00+00:00"``. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest) - ), + }, ) _sym_db.RegisterMessage(ScheduleTransferRunsRequest) ScheduleTransferRunsResponse = _reflection.GeneratedProtocolMessageType( "ScheduleTransferRunsResponse", (_message.Message,), - dict( - DESCRIPTOR=_SCHEDULETRANSFERRUNSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A response to schedule transfer runs for a time range. + { + "DESCRIPTOR": _SCHEDULETRANSFERRUNSRESPONSE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A response to schedule transfer runs for a time range. Attributes: runs: The transfer runs that were scheduled. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse) - ), + }, ) _sym_db.RegisterMessage(ScheduleTransferRunsResponse) StartManualTransferRunsRequest = _reflection.GeneratedProtocolMessageType( "StartManualTransferRunsRequest", (_message.Message,), - dict( - TimeRange=_reflection.GeneratedProtocolMessageType( + { + "TimeRange": _reflection.GeneratedProtocolMessageType( "TimeRange", (_message.Message,), - dict( - DESCRIPTOR=_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A specification for a time range, this will request transfer runs with - run\_time between start\_time (inclusive) and end\_time (exclusive). + { + "DESCRIPTOR": _STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A specification for a time range, this will request transfer runs with + run_time between start_time (inclusive) and end_time (exclusive). Attributes: start_time: Start time of the range of transfer runs. For example, - ``"2017-05-25T00:00:00+00:00"``. The start\_time must be - strictly less than the end\_time. Creates transfer runs where - run\_time is in the range betwen start\_time (inclusive) and - end\_time (exlusive). + ``"2017-05-25T00:00:00+00:00"``. The start_time must be + strictly less than the end_time. Creates transfer runs where + run_time is in the range betwen start_time (inclusive) and + end_time (exlusive). end_time: End time of the range of transfer runs. For example, - ``"2017-05-30T00:00:00+00:00"``. The end\_time must not be in - the future. Creates transfer runs where run\_time is in the - range betwen start\_time (inclusive) and end\_time (exlusive). + ``"2017-05-30T00:00:00+00:00"``. The end_time must not be in + the future. 
Creates transfer runs where run_time is in the + range betwen start_time (inclusive) and end_time (exlusive). """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange) - ), + }, ), - DESCRIPTOR=_STARTMANUALTRANSFERRUNSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A request to start manual transfer runs. + "DESCRIPTOR": _STARTMANUALTRANSFERRUNSREQUEST, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A request to start manual transfer runs. Attributes: parent: Transfer configuration name in the form: @@ -3074,15 +3202,15 @@ nfig_id}``. time: The requested time specification - this can be a time range or - a specific run\_time. + a specific run_time. requested_time_range: Time range for the transfer runs that should be started. requested_run_time: - Specific run\_time for a transfer run to be started. The - requested\_run\_time must not be in the future. + Specific run_time for a transfer run to be started. The + requested_run_time must not be in the future. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest) - ), + }, ) _sym_db.RegisterMessage(StartManualTransferRunsRequest) _sym_db.RegisterMessage(StartManualTransferRunsRequest.TimeRange) @@ -3090,16 +3218,16 @@ StartManualTransferRunsResponse = _reflection.GeneratedProtocolMessageType( "StartManualTransferRunsResponse", (_message.Message,), - dict( - DESCRIPTOR=_STARTMANUALTRANSFERRUNSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datatransfer_pb2", - __doc__="""A response to start manual transfer runs. + { + "DESCRIPTOR": _STARTMANUALTRANSFERRUNSRESPONSE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", + "__doc__": """A response to start manual transfer runs. Attributes: runs: The transfer runs that were created. 
""", # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse) - ), + }, ) _sym_db.RegisterMessage(StartManualTransferRunsResponse) @@ -3140,9 +3268,8 @@ full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService", file=DESCRIPTOR, index=0, - serialized_options=_b( - "\312A#bigquerydatatransfer.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" - ), + serialized_options=b"\312A#bigquerydatatransfer.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform", + create_key=_descriptor._internal_create_key, serialized_start=5683, serialized_end=9718, methods=[ @@ -3153,9 +3280,8 @@ containing_service=None, input_type=_GETDATASOURCEREQUEST, output_type=_DATASOURCE, - serialized_options=_b( - "\202\323\344\223\002X\022//v1/{name=projects/*/locations/*/dataSources/*}Z%\022#/v1/{name=projects/*/dataSources/*}\332A\004name" - ), + serialized_options=b"\202\323\344\223\002X\022//v1/{name=projects/*/locations/*/dataSources/*}Z%\022#/v1/{name=projects/*/dataSources/*}\332A\004name", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListDataSources", @@ -3164,9 +3290,8 @@ containing_service=None, input_type=_LISTDATASOURCESREQUEST, output_type=_LISTDATASOURCESRESPONSE, - serialized_options=_b( - "\202\323\344\223\002X\022//v1/{parent=projects/*/locations/*}/dataSourcesZ%\022#/v1/{parent=projects/*}/dataSources\332A\006parent" - ), + serialized_options=b"\202\323\344\223\002X\022//v1/{parent=projects/*/locations/*}/dataSourcesZ%\022#/v1/{parent=projects/*}/dataSources\332A\006parent", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="CreateTransferConfig", @@ -3174,10 +3299,9 @@ index=2, containing_service=None, input_type=_CREATETRANSFERCONFIGREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, - serialized_options=_b( - '\202\323\344\223\002\202\001"3/v1/{parent=projects/*/locations/*}/transferConfigs:\017transfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\017transfer_config\332A\026parent,transfer_config' - ), + output_type=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, + serialized_options=b'\202\323\344\223\002\202\001"3/v1/{parent=projects/*/locations/*}/transferConfigs:\017transfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\017transfer_config\332A\026parent,transfer_config', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="UpdateTransferConfig", @@ -3185,10 +3309,9 @@ index=3, containing_service=None, input_type=_UPDATETRANSFERCONFIGREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, - serialized_options=_b( - "\202\323\344\223\002\242\0012C/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\017transfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\017transfer_config\332A\033transfer_config,update_mask" - ), + output_type=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, + serialized_options=b"\202\323\344\223\002\242\0012C/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\017transfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\017transfer_config\332A\033transfer_config,update_mask", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="DeleteTransferConfig", 
@@ -3197,9 +3320,8 @@ containing_service=None, input_type=_DELETETRANSFERCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*'/v1/{name=projects/*/transferConfigs/*}\332A\004name" - ), + serialized_options=b"\202\323\344\223\002`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*'/v1/{name=projects/*/transferConfigs/*}\332A\004name", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="GetTransferConfig", @@ -3207,10 +3329,9 @@ index=5, containing_service=None, input_type=_GETTRANSFERCONFIGREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, - serialized_options=_b( - "\202\323\344\223\002`\0223/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\022'/v1/{name=projects/*/transferConfigs/*}\332A\004name" - ), + output_type=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, + serialized_options=b"\202\323\344\223\002`\0223/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\022'/v1/{name=projects/*/transferConfigs/*}\332A\004name", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListTransferConfigs", @@ -3219,9 +3340,8 @@ containing_service=None, input_type=_LISTTRANSFERCONFIGSREQUEST, output_type=_LISTTRANSFERCONFIGSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002`\0223/v1/{parent=projects/*/locations/*}/transferConfigsZ)\022'/v1/{parent=projects/*}/transferConfigs\332A\006parent" - ), + serialized_options=b"\202\323\344\223\002`\0223/v1/{parent=projects/*/locations/*}/transferConfigsZ)\022'/v1/{parent=projects/*}/transferConfigs\332A\006parent", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ScheduleTransferRuns", @@ -3230,9 +3350,8 @@ containing_service=None, input_type=_SCHEDULETRANSFERRUNSREQUEST, output_type=_SCHEDULETRANSFERRUNSRESPONSE, - serialized_options=_b( - '\210\002\001\202\323\344\223\002\204\001"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\001*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\001*\332A\032parent,start_time,end_time' - ), + serialized_options=b'\210\002\001\202\323\344\223\002\204\001"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\001*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\001*\332A\032parent,start_time,end_time', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="StartManualTransferRuns", @@ -3241,9 +3360,8 @@ containing_service=None, input_type=_STARTMANUALTRANSFERRUNSREQUEST, output_type=_STARTMANUALTRANSFERRUNSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002\212\001"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\001*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\001*' - ), + serialized_options=b'\202\323\344\223\002\212\001"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\001*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\001*', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="GetTransferRun", @@ -3251,10 +3369,9 @@ index=9, containing_service=None, input_type=_GETTRANSFERRUNREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN, - serialized_options=_b( - 
"\202\323\344\223\002n\022:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\022./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name" - ), + output_type=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN, + serialized_options=b"\202\323\344\223\002n\022:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\022./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="DeleteTransferRun", @@ -3263,9 +3380,8 @@ containing_service=None, input_type=_DELETETRANSFERRUNREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name" - ), + serialized_options=b"\202\323\344\223\002n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListTransferRuns", @@ -3274,9 +3390,8 @@ containing_service=None, input_type=_LISTTRANSFERRUNSREQUEST, output_type=_LISTTRANSFERRUNSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002n\022:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\022./v1/{parent=projects/*/transferConfigs/*}/runs\332A\006parent" - ), + serialized_options=b"\202\323\344\223\002n\022:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\022./v1/{parent=projects/*/transferConfigs/*}/runs\332A\006parent", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListTransferLogs", @@ -3285,9 +3400,8 @@ containing_service=None, input_type=_LISTTRANSFERLOGSREQUEST, output_type=_LISTTRANSFERLOGSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002\214\001\022I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\022=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\332A\006parent" - ), + serialized_options=b"\202\323\344\223\002\214\001\022I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\022=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\332A\006parent", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="CheckValidCreds", @@ -3296,9 +3410,8 @@ containing_service=None, input_type=_CHECKVALIDCREDSREQUEST, output_type=_CHECKVALIDCREDSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\001*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\001*\332A\004name' - ), + serialized_options=b'\202\323\344\223\002~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\001*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\001*\332A\004name', + create_key=_descriptor._internal_create_key, ), ], ) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py index ab525e22457c..e39179b15c0e 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py @@ -2,10 +2,10 @@ import grpc from 
google.cloud.bigquery_datatransfer_v1.proto import ( - datatransfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2, + datatransfer_pb2 as google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2, ) from google.cloud.bigquery_datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, + transfer_pb2 as google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2, ) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 @@ -25,73 +25,73 @@ def __init__(self, channel): """ self.GetDataSource = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetDataSourceRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DataSource.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetDataSourceRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DataSource.FromString, ) self.ListDataSources = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesResponse.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesResponse.FromString, ) self.CreateTransferConfig = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CreateTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CreateTransferConfigRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, ) self.UpdateTransferConfig = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.UpdateTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.UpdateTransferConfigRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, ) self.DeleteTransferConfig = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", - 
request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferConfigRequest.SerializeToString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferConfigRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.GetTransferConfig = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferConfigRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, ) self.ListTransferConfigs = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsResponse.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsResponse.FromString, ) self.ScheduleTransferRuns = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsResponse.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsResponse.FromString, ) self.StartManualTransferRuns = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsResponse.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsResponse.FromString, ) self.GetTransferRun = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferRunRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferRunRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.FromString, ) self.DeleteTransferRun = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferRunRequest.SerializeToString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferRunRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.ListTransferRuns = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsResponse.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsResponse.FromString, ) self.ListTransferLogs = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsResponse.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsResponse.FromString, ) self.CheckValidCreds = channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsResponse.FromString, + request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsResponse.FromString, ) @@ -221,73 +221,73 @@ def add_DataTransferServiceServicer_to_server(servicer, server): rpc_method_handlers = { "GetDataSource": grpc.unary_unary_rpc_method_handler( servicer.GetDataSource, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetDataSourceRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DataSource.SerializeToString, + 
request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetDataSourceRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DataSource.SerializeToString, ), "ListDataSources": grpc.unary_unary_rpc_method_handler( servicer.ListDataSources, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesResponse.SerializeToString, ), "CreateTransferConfig": grpc.unary_unary_rpc_method_handler( servicer.CreateTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CreateTransferConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CreateTransferConfigRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, ), "UpdateTransferConfig": grpc.unary_unary_rpc_method_handler( servicer.UpdateTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.UpdateTransferConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.UpdateTransferConfigRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, ), "DeleteTransferConfig": grpc.unary_unary_rpc_method_handler( servicer.DeleteTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferConfigRequest.FromString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferConfigRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), "GetTransferConfig": grpc.unary_unary_rpc_method_handler( servicer.GetTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferConfigRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, ), "ListTransferConfigs": grpc.unary_unary_rpc_method_handler( servicer.ListTransferConfigs, - 
request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsResponse.SerializeToString, ), "ScheduleTransferRuns": grpc.unary_unary_rpc_method_handler( servicer.ScheduleTransferRuns, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsResponse.SerializeToString, ), "StartManualTransferRuns": grpc.unary_unary_rpc_method_handler( servicer.StartManualTransferRuns, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsResponse.SerializeToString, ), "GetTransferRun": grpc.unary_unary_rpc_method_handler( servicer.GetTransferRun, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferRunRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferRunRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.SerializeToString, ), "DeleteTransferRun": grpc.unary_unary_rpc_method_handler( servicer.DeleteTransferRun, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferRunRequest.FromString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferRunRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), "ListTransferRuns": grpc.unary_unary_rpc_method_handler( servicer.ListTransferRuns, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsResponse.SerializeToString, + 
request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsResponse.SerializeToString, ), "ListTransferLogs": grpc.unary_unary_rpc_method_handler( servicer.ListTransferLogs, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsResponse.SerializeToString, ), "CheckValidCreds": grpc.unary_unary_rpc_method_handler( servicer.CheckValidCreds, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsRequest.FromString, + response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py index 940a37410c09..79799ed54950 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py @@ -1,10 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
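Note on the regenerated module below: it picks up the new source layout (the proto path moves from google/cloud/bigquery/datatransfer_v1 to google/cloud/bigquery_datatransfer_v1, which is also why every import alias in the gRPC hunks above gained the double-underscore form bigquery__datatransfer__v1) plus two mechanical codegen changes: the Python 2 compatibility shim _b is deleted in favor of plain b"..." literals, and create_key=_descriptor._internal_create_key is threaded through every Descriptor, EnumDescriptor, and FieldDescriptor call, which recent protobuf runtimes check in order to discourage hand-built descriptors. For context, the deleted shim was equivalent to this (a sketch of the removed helper, not new API):

    # On Python 2, str literals are already bytes, so _b is the identity;
    # on Python 3 it latin-1-encodes the literal, making
    # _b("\340A\003") == b"\340A\003".
    import sys
    _b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))

With Python 2 support dropped, the generator can emit the bytes literals directly, which is all the serialized_options/serialized_pb rewrites below amount to.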
-# source: google/cloud/bigquery/datatransfer_v1/proto/transfer.proto +# source: google/cloud/bigquery_datatransfer_v1/proto/transfer.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -24,15 +21,12 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/datatransfer_v1/proto/transfer.proto", + name="google/cloud/bigquery_datatransfer_v1/proto/transfer.proto", package="google.cloud.bigquery.datatransfer.v1", syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\242\002\005GCBDT\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1\352\002)Google::Cloud::Bigquery::DataTransfer::V1" - ), - serialized_pb=_b( - '\n:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"0\n\x10\x45mailPreferences\x12\x1c\n\x14\x65nable_failure_email\x18\x01 \x01(\x08"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xd7\x06\n\x0eTransferConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03\x12!\n\x19notification_pubsub_topic\x18\x0f \x01(\t\x12R\n\x11\x65mail_preferences\x18\x12 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferences:\xb9\x01\xea\x41\xb5\x01\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}\x12Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stination"\xfa\x06\n\x0bTransferRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t 
\x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12&\n\x19notification_pubsub_topic\x18\x17 \x01(\tB\x03\xe0\x41\x03\x12W\n\x11\x65mail_preferences\x18\x19 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferencesB\x03\xe0\x41\x03:\xc4\x01\xea\x41\xc0\x01\n\'bigquerydatatransfer.googleapis.com/Run\x12?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t"U\n\x0fMessageSeverity\x12 \n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\x93\x02\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3' - ), + serialized_options=b"\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\242\002\005GCBDT\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1\352\002)Google::Cloud::Bigquery::DataTransfer::V1", + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n:google/cloud/bigquery_datatransfer_v1/proto/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"0\n\x10\x45mailPreferences\x12\x1c\n\x14\x65nable_failure_email\x18\x01 \x01(\x08"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xd7\x06\n\x0eTransferConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03\x12!\n\x19notification_pubsub_topic\x18\x0f \x01(\t\x12R\n\x11\x65mail_preferences\x18\x12 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferences:\xb9\x01\xea\x41\xb5\x01\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}\x12Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stination"\xfa\x06\n\x0bTransferRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12&\n\x19notification_pubsub_topic\x18\x17 \x01(\tB\x03\xe0\x41\x03\x12W\n\x11\x65mail_preferences\x18\x19 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferencesB\x03\xe0\x41\x03:\xc4\x01\xea\x41\xc0\x01\n\'bigquerydatatransfer.googleapis.com/Run\x12?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t"U\n\x0fMessageSeverity\x12 \n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\x93\x02\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3', dependencies=[ google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, @@ -47,6 +41,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.TransferType", filename=None, file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="TRANSFER_TYPE_UNSPECIFIED", @@ -54,16 +49,27 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="BATCH", index=1, number=1, serialized_options=None, type=None + name="BATCH", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="STREAMING", index=2, number=2, serialized_options=None, type=None + name="STREAMING", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, - serialized_options=_b("\030\001"), + serialized_options=b"\030\001", serialized_start=2466, serialized_end=2541, ) @@ -75,6 +81,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.TransferState", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="TRANSFER_STATE_UNSPECIFIED", @@ -82,21 +89,47 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=2, serialized_options=None, type=None + name="PENDING", + index=1, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="RUNNING", index=2, number=3, serialized_options=None, type=None + name="RUNNING", + index=2, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="SUCCEEDED", index=3, number=4, serialized_options=None, type=None + name="SUCCEEDED", + index=3, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="FAILED", index=4, number=5, serialized_options=None, type=None + name="FAILED", + index=4, + number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="CANCELLED", index=5, number=6, serialized_options=None, type=None + name="CANCELLED", + index=5, + number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, @@ -123,6 +156,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="MESSAGE_SEVERITY_UNSPECIFIED", @@ -130,15 +164,31 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="INFO", index=1, number=1, serialized_options=None, type=None + name="INFO", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="WARNING", index=2, number=2, serialized_options=None, type=None + name="WARNING", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="ERROR", index=3, number=3, serialized_options=None, type=None + name="ERROR", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, @@ -155,6 +205,7 @@ filename=None, 
file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="enable_failure_email", @@ -173,6 +224,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -194,6 +246,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="disable_auto_scheduling", @@ -212,6 +265,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="start_time", @@ -230,6 +284,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="end_time", @@ -248,6 +303,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -269,6 +325,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -279,7 +336,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -287,6 +344,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="destination_dataset_id", @@ -297,7 +355,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -305,6 +363,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="display_name", @@ -315,7 +374,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -323,6 +382,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="data_source_id", @@ -333,7 +393,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -341,6 +401,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="params", @@ -359,6 +420,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="schedule", @@ -369,7 +431,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -377,6 +439,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="schedule_options", @@ -395,6 +458,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="data_refresh_window_days", @@ -413,6 +477,7 @@ extension_scope=None, serialized_options=None, 
file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="disabled", @@ -431,6 +496,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="update_time", @@ -447,8 +513,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_run_time", @@ -465,8 +532,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="state", @@ -483,8 +551,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="user_id", @@ -503,6 +572,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="dataset_region", @@ -513,14 +583,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="notification_pubsub_topic", @@ -531,7 +602,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -539,6 +610,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="email_preferences", @@ -557,14 +629,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=_b( - "\352A\265\001\n2bigquerydatatransfer.googleapis.com/TransferConfig\0224projects/{project}/transferConfigs/{transfer_config}\022Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}" - ), + serialized_options=b"\352A\265\001\n2bigquerydatatransfer.googleapis.com/TransferConfig\0224projects/{project}/transferConfigs/{transfer_config}\022Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -574,6 +645,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.destination", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], ) ], @@ -588,6 +660,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -598,7 +671,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -606,6 +679,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), 
_descriptor.FieldDescriptor( name="schedule_time", @@ -624,6 +698,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="run_time", @@ -642,6 +717,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="error_status", @@ -660,6 +736,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="start_time", @@ -676,8 +753,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="end_time", @@ -694,8 +772,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="update_time", @@ -712,8 +791,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="params", @@ -730,8 +810,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="destination_dataset_id", @@ -742,14 +823,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="data_source_id", @@ -760,14 +842,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="state", @@ -786,6 +869,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="user_id", @@ -804,6 +888,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="schedule", @@ -814,14 +899,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="notification_pubsub_topic", @@ -832,14 +918,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="email_preferences", @@ -856,16 +943,15 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\003"), + serialized_options=b"\340A\003", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=_b( - "\352A\300\001\n'bigquerydatatransfer.googleapis.com/Run\022?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\022Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}" - ), + serialized_options=b"\352A\300\001\n'bigquerydatatransfer.googleapis.com/Run\022?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\022Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -875,6 +961,7 @@ full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.destination", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], ) ], @@ -889,6 +976,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="message_time", @@ -907,6 +995,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="severity", @@ -925,6 +1014,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="message_text", @@ -935,7 +1025,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -943,6 +1033,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1027,10 +1118,10 @@ EmailPreferences = _reflection.GeneratedProtocolMessageType( "EmailPreferences", (_message.Message,), - dict( - DESCRIPTOR=_EMAILPREFERENCES, - __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2", - __doc__="""Represents preferences for sending email notifications for transfer + { + "DESCRIPTOR": _EMAILPREFERENCES, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", + "__doc__": """Represents preferences for sending email notifications for transfer run events. Attributes: enable_failure_email: @@ -1038,17 +1129,17 @@ failures. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.EmailPreferences) - ), + }, ) _sym_db.RegisterMessage(EmailPreferences) ScheduleOptions = _reflection.GeneratedProtocolMessageType( "ScheduleOptions", (_message.Message,), - dict( - DESCRIPTOR=_SCHEDULEOPTIONS, - __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2", - __doc__="""Options customizing the data transfer schedule. + { + "DESCRIPTOR": _SCHEDULEOPTIONS, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", + "__doc__": """Options customizing the data transfer schedule. Attributes: disable_auto_scheduling: If true, automatic scheduling of data transfer runs for this @@ -1070,17 +1161,17 @@ trigerred manually is not limited by this option. 
""", # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleOptions) - ), + }, ) _sym_db.RegisterMessage(ScheduleOptions) TransferConfig = _reflection.GeneratedProtocolMessageType( "TransferConfig", (_message.Message,), - dict( - DESCRIPTOR=_TRANSFERCONFIG, - __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2", - __doc__="""Represents a data transfer configuration. A transfer configuration + { + "DESCRIPTOR": _TRANSFERCONFIG, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", + "__doc__": """Represents a data transfer configuration. A transfer configuration contains all metadata needed to perform a data transfer. For example, ``destination_dataset_id`` specifies where data should be stored. When a new transfer configuration is created, the specified @@ -1091,11 +1182,11 @@ The resource name of the transfer config. Transfer config names have the form of ``projects/{project_id}/locations/{regi on}/transferConfigs/{config_id}``. The name is automatically - generated based on the config\_id specified in - CreateTransferConfigRequest along with project\_id and region. - If config\_id is not provided, usually a uuid, even though it + generated based on the config_id specified in + CreateTransferConfigRequest along with project_id and region. + If config_id is not provided, usually a uuid, even though it is not guaranteed or required, will be generated for - config\_id. + config_id. destination: The desination of the transfer config. destination_dataset_id: @@ -1115,9 +1206,9 @@ of month 15:30``, ``every wed,fri of jan,jun 13:15``, and ``first sunday of quarter 00:00``. See more explanation about the format here: https://cloud.google.com/appengine/docs/flexi - ble/python/scheduling-jobs-with-cron- - yaml#the\_schedule\_format NOTE: the granularity should be at - least 8 hours, or less frequent. + ble/python/scheduling-jobs-with-cron-yaml#the_schedule_format + NOTE: the granularity should be at least 8 hours, or less + frequent. schedule_options: Options customizing the data transfer schedule. data_refresh_window_days: @@ -1151,17 +1242,17 @@ transfer config. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferConfig) - ), + }, ) _sym_db.RegisterMessage(TransferConfig) TransferRun = _reflection.GeneratedProtocolMessageType( "TransferRun", (_message.Message,), - dict( - DESCRIPTOR=_TRANSFERRUN, - __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2", - __doc__="""Represents a data transfer run. + { + "DESCRIPTOR": _TRANSFERRUN, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", + "__doc__": """Represents a data transfer run. Attributes: name: The resource name of the transfer run. Transfer run names have @@ -1202,7 +1293,7 @@ was created as part of a regular schedule. For batch transfer runs that are scheduled manually, this is empty. NOTE: the system might choose to delay the schedule depending on the - current load, so ``schedule_time`` doesn't always match this. + current load, so ``schedule_time`` doesn’t always match this. notification_pubsub_topic: Output only. Pub/Sub topic where a notification will be sent after this transfer run finishes @@ -1212,17 +1303,17 @@ the transfer config this run was derived from. 
""", # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferRun) - ), + }, ) _sym_db.RegisterMessage(TransferRun) TransferMessage = _reflection.GeneratedProtocolMessageType( "TransferMessage", (_message.Message,), - dict( - DESCRIPTOR=_TRANSFERMESSAGE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.transfer_pb2", - __doc__="""Represents a user facing message for a particular data transfer run. + { + "DESCRIPTOR": _TRANSFERMESSAGE, + "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", + "__doc__": """Represents a user facing message for a particular data transfer run. Attributes: message_time: Time when message was logged. @@ -1232,7 +1323,7 @@ Message text. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferMessage) - ), + }, ) _sym_db.RegisterMessage(TransferMessage) diff --git a/packages/google-cloud-bigquery-datatransfer/noxfile.py b/packages/google-cloud-bigquery-datatransfer/noxfile.py index b0f07cc7a45b..5b7e179d5ef0 100644 --- a/packages/google-cloud-bigquery-datatransfer/noxfile.py +++ b/packages/google-cloud-bigquery-datatransfer/noxfile.py @@ -26,11 +26,12 @@ BLACK_VERSION = "black==19.3b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -if os.path.exists("samples"): - BLACK_PATHS.append("samples") +DEFAULT_PYTHON_VERSION = "3.7" +SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.7"] +UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. @@ -56,7 +57,7 @@ def blacken(session): session.run("black", *BLACK_PATHS) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") @@ -84,13 +85,13 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["2.7", "3.7"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") @@ -110,8 +111,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install("mock", "pytest") - session.install("git+https://github.com/googleapis/python-test-utils") + session.install("mock", "pytest", "google-cloud-testutils") session.install("-e", ".") # Run py.test against the system tests. @@ -138,7 +138,7 @@ def samples(session): session.run("py.test", "--quiet", "samples", *session.posargs) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. 
@@ -151,12 +151,12 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx<3.0", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/decrypt-secrets.sh b/packages/google-cloud-bigquery-datatransfer/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..ff599eb2af25 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/decrypt-secrets.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + > testing/client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/readme_gen.py b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..d309d6e97518 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/readme_gen.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
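One caveat in the new readme_gen.py that follows: it parses its config with a bare yaml.load(f). That works, but PyYAML 5.1 and later emit a deprecation warning when no explicit Loader is passed, and yaml.safe_load is the usual drop-in replacement. A sketch under that assumption (the config filename here is hypothetical):

    import yaml

    # safe_load limits parsing to plain data types (dicts, lists, scalars),
    # avoiding the arbitrary object construction that bare yaml.load() permits.
    with open("README.rst.yaml") as f:  # hypothetical config file name
        config = yaml.safe_load(f)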
+ +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. 
code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..a0406dba8c84 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. 
code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-bigquery-datatransfer/synth.metadata b/packages/google-cloud-bigquery-datatransfer/synth.metadata index 7af3f2e9e642..5a1fdaf845f3 100644 --- a/packages/google-cloud-bigquery-datatransfer/synth.metadata +++ b/packages/google-cloud-bigquery-datatransfer/synth.metadata @@ -1,32 +1,25 @@ { "sources": [ - { - "generator": { - "name": "artman", - "version": "2.0.0", - "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098" - } - }, { "git": { "name": ".", "remote": "https://github.com/googleapis/python-bigquery-datatransfer.git", - "sha": "d10fd6dc77c1987983ada5585a632e7de4d10c44" + "sha": "c215fd155ee109479c5697d7c31adcd06cdac86d" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "cdf13efacdea0649e940452f9c5d320b93735974", - "internalRef": "306783437" + "sha": "eafa840ceec23b44a5c21670288107c661252711", + "internalRef": "313488995" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6980131905b652563280e4d2482384d4acc9eafc" + "sha": "4e1d2cb79b02d7496b1452f91c518630c207145e" } } ], @@ -37,8 +30,7 @@ "apiName": "bigquery_datatransfer", "apiVersion": "v1", "language": "python", - "generator": "gapic", - "config": "google/cloud/bigquery/datatransfer/artman_bigquerydatatransfer.yaml" + "generator": "bazel" } } ] diff --git a/packages/google-cloud-bigquery-datatransfer/synth.py b/packages/google-cloud-bigquery-datatransfer/synth.py index 1e6dc8f3153f..da01bcf49e0a 100644 --- a/packages/google-cloud-bigquery-datatransfer/synth.py +++ b/packages/google-cloud-bigquery-datatransfer/synth.py @@ -63,4 +63,7 @@ templated_files = common.py_library(unit_cov_level=79, cov_level=79, samples_test=True) s.move(templated_files) +# TODO(busunkim): Use latest sphinx after microgenerator 
transition +s.replace("noxfile.py", '"sphinx"', '"sphinx<3.0.0"') + s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-bigquery-datatransfer/testing/.gitignore b/packages/google-cloud-bigquery-datatransfer/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py index a24f06476e97..c3883fb5f6fc 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py @@ -64,6 +64,108 @@ class CustomException(Exception): class TestDataTransferServiceClient(object): + def test_delete_transfer_config(self): + channel = ChannelStub() + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Setup Request + name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") + + client.delete_transfer_config(name) + + assert len(channel.requests) == 1 + expected_request = datatransfer_pb2.DeleteTransferConfigRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_transfer_config_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Setup request + name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") + + with pytest.raises(CustomException): + client.delete_transfer_config(name) + + def test_delete_transfer_run(self): + channel = ChannelStub() + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Setup Request + name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") + + client.delete_transfer_run(name) + + assert len(channel.requests) == 1 + expected_request = datatransfer_pb2.DeleteTransferRunRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_transfer_run_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Setup request + name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") + + with pytest.raises(CustomException): + client.delete_transfer_run(name) + + def test_check_valid_creds(self): + # Setup Expected Response + has_valid_creds = False + expected_response = {"has_valid_creds": has_valid_creds} + expected_response = datatransfer_pb2.CheckValidCredsResponse( + 
**expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Setup Request + name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") + + response = client.check_valid_creds(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datatransfer_pb2.CheckValidCredsRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_check_valid_creds_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Setup request + name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") + + with pytest.raises(CustomException): + client.check_valid_creds(name) + def test_get_data_source(self): # Setup Expected Response name_2 = "name2-1052831874" @@ -298,37 +400,6 @@ def test_update_transfer_config_exception(self): with pytest.raises(CustomException): client.update_transfer_config(transfer_config, update_mask) - def test_delete_transfer_config(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - client.delete_transfer_config(name) - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.DeleteTransferConfigRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_transfer_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - with pytest.raises(CustomException): - client.delete_transfer_config(name) - def test_get_transfer_config(self): # Setup Expected Response name_2 = "name2-1052831874" @@ -563,37 +634,6 @@ def test_get_transfer_run_exception(self): with pytest.raises(CustomException): client.get_transfer_run(name) - def test_delete_transfer_run(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - client.delete_transfer_run(name) - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.DeleteTransferRunRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_transfer_run_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - with pytest.raises(CustomException): - client.delete_transfer_run(name) - def test_list_transfer_runs(self): # Setup Expected Response next_page_token = "" @@ -689,43 +729,3 @@ def test_list_transfer_logs_exception(self): paged_list_response = client.list_transfer_logs(parent) with pytest.raises(CustomException): list(paged_list_response) - - def test_check_valid_creds(self): - # Setup Expected Response - has_valid_creds = False - expected_response = {"has_valid_creds": has_valid_creds} - expected_response = datatransfer_pb2.CheckValidCredsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - response = client.check_valid_creds(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.CheckValidCredsRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_check_valid_creds_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - with pytest.raises(CustomException): - client.check_valid_creds(name)